[
{
    "branch": "main",
    "index": "0",
    "phase": "pre",
    "playbook": "github.com/vexxhost/zuul-config/playbooks/base/pre.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T21:03:12.557250Z",
                    "start": "2026-03-01T21:03:09.984423Z"
                },
                "id": "0242ac17-0011-7fd7-c28d-000000000002",
                "name": "localhost"
            },
            "tasks": [
                {
                    "hosts": {
                        "localhost": {
                            "action": "gather_facts",
                            "ansible_facts": {
                                "ansible_apparmor": {
                                    "status": "disabled"
                                },
                                "ansible_architecture": "x86_64",
                                "ansible_bios_date": "NA",
                                "ansible_bios_vendor": "NA",
                                "ansible_bios_version": "NA",
                                "ansible_board_asset_tag": "NA",
                                "ansible_board_name": "NA",
                                "ansible_board_serial": "NA",
                                "ansible_board_vendor": "NA",
                                "ansible_board_version": "NA",
                                "ansible_chassis_asset_tag": "NA",
                                "ansible_chassis_serial": "NA",
                                "ansible_chassis_vendor": "NA",
                                "ansible_chassis_version": "NA",
                                "ansible_cmdline": {
                                    "BOOT_IMAGE": "/boot/vmlinuz-5.15.0-130-generic",
                                    "console": "ttyS0",
                                    "ro": true,
                                    "root": "UUID=5a569d86-b935-46dd-ae79-7a72a25b6a4c"
                                },
                                "ansible_date_time": {
                                    "date": "2026-03-01",
                                    "day": "01",
                                    "epoch": "1772398990",
                                    "epoch_int": "1772398990",
                                    "hour": "21",
                                    "iso8601": "2026-03-01T21:03:10Z",
                                    "iso8601_basic": "20260301T210310871145",
                                    "iso8601_basic_short": "20260301T210310",
                                    "iso8601_micro": "2026-03-01T21:03:10.871145Z",
                                    "minute": "03",
                                    "month": "03",
                                    "second": "10",
                                    "time": "21:03:10",
                                    "tz": "UTC",
                                    "tz_dst": "UTC",
                                    "tz_offset": "+0000",
                                    "weekday": "Sunday",
                                    "weekday_number": "0",
                                    "weeknumber": "08",
                                    "year": "2026"
                                },
                                "ansible_devices": {},
                                "ansible_distribution": "Debian",
                                "ansible_distribution_major_version": "12",
                                "ansible_distribution_release": "bookworm",
                                "ansible_distribution_version": "12",
                                "ansible_dns": {
                                    "nameservers": [
                                        "127.0.0.11"
                                    ],
                                    "options": {
                                        "edns0": true,
                                        "ndots": "0",
                                        "trust-ad": true
                                    },
                                    "search": [
                                        "openstacklocal"
                                    ]
                                },
                                "ansible_domain": "",
                                "ansible_effective_group_id": 0,
                                "ansible_effective_user_id": 0,
                                "ansible_env": {
                                    "ANSIBLE_CONFIG": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/ansible/pre_playbook_0/ansible.cfg",
                                    "DEBIAN_FRONTEND": "noninteractive",
                                    "GPG_KEY": "A035C8C19219BA821ECEA86B64E628F8D684696D",
                                    "HOME": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work",
                                    "HOSTNAME": "0a8996d2b663",
                                    "LANG": "C.UTF-8",
                                    "PATH": "/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
                                    "PWD": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_0/github.com/vexxhost/zuul-config/playbooks/base",
                                    "PYTHONPATH": "/var/lib/zuul/ansible/9",
                                    "PYTHON_SHA256": "8d3ed8ec5c88c1c95f5e558612a725450d2452813ddad5e58fdb1a53b1209b78",
                                    "PYTHON_VERSION": "3.11.14",
                                    "SSH_AGENT_PID": "1902047",
                                    "SSH_AUTH_SOCK": "/tmp/ssh-X4mEfRrM5sec/agent.1902046",
                                    "TMP": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/tmp",
                                    "ZUUL_ANSIBLE_SPLIT_STREAMS": "False",
                                    "ZUUL_JOBDIR": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88",
                                    "ZUUL_JOB_FAILURE_OUTPUT": "[]",
                                    "ZUUL_JOB_LOG_CONFIG": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/ansible/logging.json",
                                    "ZUUL_OUTPUT_MAX_BYTES": "1073741824"
                                },
                                "ansible_fibre_channel_wwn": [],
                                "ansible_fips": false,
                                "ansible_form_factor": "NA",
                                "ansible_fqdn": "0a8996d2b663",
                                "ansible_hostname": "0a8996d2b663",
                                "ansible_hostnqn": "",
                                "ansible_is_chroot": false,
                                "ansible_iscsi_iqn": "",
                                "ansible_kernel": "5.15.0-130-generic",
                                "ansible_kernel_version": "#140-Ubuntu SMP Wed Dec 18 17:59:53 UTC 2024",
                                "ansible_loadavg": {
                                    "15m": 2.27001953125,
                                    "1m": 5.1142578125,
                                    "5m": 2.83251953125
                                },
                                "ansible_local": {},
                                "ansible_lsb": {},
                                "ansible_lvm": "N/A",
                                "ansible_machine": "x86_64",
                                "ansible_memfree_mb": 556,
                                "ansible_memory_mb": {
                                    "nocache": {
                                        "free": 17357,
                                        "used": 14733
                                    },
                                    "real": {
                                        "free": 556,
                                        "total": 32090,
                                        "used": 31534
                                    },
                                    "swap": {
                                        "cached": 0,
                                        "free": 0,
                                        "total": 0,
                                        "used": 0
                                    }
                                },
                                "ansible_memtotal_mb": 32090,
                                "ansible_mounts": [
                                    {
                                        "block_available": 8242676,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358904,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/etc/resolv.conf",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33762000896,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242663,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358917,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/etc/hosts",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761947648,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242660,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358920,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work",
                                        "options": "rw,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761935360,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242630,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358950,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/var/lib/zuul/ansible/9",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761812480,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242628,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358952,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/ansible",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761804288,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242628,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358952,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761804288,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242628,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358952,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761804288,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242628,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358952,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/ansible/pre_playbook_0",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761804288,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242628,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358952,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/etc/zuul/site-variables.yaml",
                                        "options": "ro,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761804288,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 228165290,
                                        "block_size": 4096,
                                        "block_total": 263940717,
                                        "block_used": 35775427,
                                        "device": "/dev/vdb",
                                        "fstype": "ext4",
                                        "inode_available": 64740976,
                                        "inode_total": 67108864,
                                        "inode_used": 2367888,
                                        "mount": "/srv/static/logs",
                                        "options": "rw,nosuid,nodev,relatime,discard",
                                        "size_available": 934565027840,
                                        "size_total": 1081101176832,
                                        "uuid": "N/A"
                                    },
                                    {
                                        "block_available": 8242628,
                                        "block_size": 4096,
                                        "block_total": 40601580,
                                        "block_used": 32358952,
                                        "device": "/dev/vda1",
                                        "fstype": "ext4",
                                        "inode_available": 16638174,
                                        "inode_total": 20643840,
                                        "inode_used": 4005666,
                                        "mount": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/.ansible",
                                        "options": "rw,nosuid,nodev,relatime,discard,errors=remount-ro,bind",
                                        "size_available": 33761804288,
                                        "size_total": 166304071680,
                                        "uuid": "N/A"
                                    }
                                ],
                                "ansible_nodename": "0a8996d2b663",
                                "ansible_os_family": "Debian",
                                "ansible_pkg_mgr": "apt",
                                "ansible_proc_cmdline": {
                                    "BOOT_IMAGE": "/boot/vmlinuz-5.15.0-130-generic",
                                    "console": [
                                        "tty1",
                                        "ttyS0"
                                    ],
                                    "ro": true,
                                    "root": "UUID=5a569d86-b935-46dd-ae79-7a72a25b6a4c"
                                },
                                "ansible_processor": [
                                    "0",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "1",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "2",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "3",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "4",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "5",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "6",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "7",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "8",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "9",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "10",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "11",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "12",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "13",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "14",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor",
                                    "15",
                                    "AuthenticAMD",
                                    "AMD EPYC-Rome Processor"
                                ],
                                "ansible_processor_cores": 1,
                                "ansible_processor_count": 16,
                                "ansible_processor_nproc": 16,
                                "ansible_processor_threads_per_core": 1,
                                "ansible_processor_vcpus": 16,
                                "ansible_product_name": "NA",
                                "ansible_product_serial": "NA",
                                "ansible_product_uuid": "NA",
                                "ansible_product_version": "NA",
                                "ansible_python": {
                                    "executable": "/usr/local/lib/zuul/ansible/9/bin/python",
                                    "has_sslcontext": true,
                                    "type": "cpython",
                                    "version": {
                                        "major": 3,
                                        "micro": 14,
                                        "minor": 11,
                                        "releaselevel": "final",
                                        "serial": 0
                                    },
                                    "version_info": [
                                        3,
                                        11,
                                        14,
                                        "final",
                                        0
                                    ]
                                },
                                "ansible_python_version": "3.11.14",
                                "ansible_real_group_id": 0,
                                "ansible_real_user_id": 0,
                                "ansible_selinux": {
                                    "status": "disabled"
                                },
                                "ansible_selinux_python_present": true,
                                "ansible_service_mgr": "bwrap",
                                "ansible_swapfree_mb": 0,
                                "ansible_swaptotal_mb": 0,
                                "ansible_system": "Linux",
                                "ansible_system_capabilities": "N/A",
                                "ansible_system_capabilities_enforced": "N/A",
                                "ansible_system_vendor": "NA",
                                "ansible_uptime_seconds": 36062720,
                                "ansible_user_dir": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work",
                                "ansible_user_gecos": "root",
                                "ansible_user_gid": 0,
                                "ansible_user_id": "root",
                                "ansible_user_shell": "/bin/bash",
                                "ansible_user_uid": 0,
                                "ansible_userspace_architecture": "x86_64",
                                "ansible_userspace_bits": "64",
                                "ansible_virtualization_role": "host",
                                "ansible_virtualization_tech_guest": [],
                                "ansible_virtualization_tech_host": [
                                    "kvm"
                                ],
                                "ansible_virtualization_type": "kvm",
                                "gather_subset": [
                                    "all"
                                ],
                                "module_setup": true
                            },
                            "changed": false,
                            "deprecations": [],
                            "warnings": []
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:10.946850Z",
                            "start": "2026-03-01T21:03:09.995795Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000002a",
                        "name": "Gathering Facts"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "include_role",
                            "changed": false,
                            "include_args": {
                                "name": "set-zuul-log-path-fact"
                            }
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000006",
                        "name": "emit-job-header",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/emit-job-header"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:10.973407Z",
                            "start": "2026-03-01T21:03:10.951507Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000008",
                        "name": "Setup log path fact"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "set_fact",
                            "ansible_facts": {
                                "zuul_log_path": "545/oss/5458773516a34c28bdbe6e02248c9f88"
                            },
                            "changed": false
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000054",
                        "name": "set-zuul-log-path-fact",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/set-zuul-log-path-fact"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:11.029457Z",
                            "start": "2026-03-01T21:03:10.986270Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000056",
                        "name": "Set log path for a build"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "debug",
                            "changed": false,
                            "msg": "# Job Information\nAnsible Version: 2.16.16\nJob: atmosphere-molecule-aio-ovn\nPipeline: check\nExecutor: 0a8996d2b663\nTriggered by: https://github.com/vexxhost/atmosphere/pull/3589\nEvent ID: eac304c0-15b1-11f1-9542-260794e7bde7\n"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000006",
                        "name": "emit-job-header",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/emit-job-header"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:11.086540Z",
                            "start": "2026-03-01T21:03:11.038498Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000009",
                        "name": "Print job information"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "debug",
                            "changed": false,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_item",
                                    "changed": false,
                                    "failed": false,
                                    "msg": "# Node Information\nInventory Hostname: instance\nHostname: np0000157528\nUsername: zuul\nDistro: Ubuntu 22.04\nProvider: yul1\nRegion: ca-ymq-1\nLabel: ubuntu-jammy-16\nProduct Name: OpenStack Nova\nInterface IP: 199.204.45.156\n",
                                    "zj_item": "instance"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000006",
                        "name": "emit-job-header",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/emit-job-header"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:11.189486Z",
                            "start": "2026-03-01T21:03:11.089930Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000000a",
                        "name": "Print node information"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/zuul-info",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/zuul-info",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": 493,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/zuul-info",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/zuul-info",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-00000000000d",
                        "name": "log-inventory",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/log-inventory"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:11.644997Z",
                            "start": "2026-03-01T21:03:11.199989Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000000f",
                        "name": "Ensure Zuul Ansible directory exists"
                    }
                },
                {
                    "hosts": {
                        "localhost": {
                            "action": "copy",
                            "changed": true,
                            "checksum": "a27056f9bc83510c10e2adecf43ab02ad53f4aa5",
                            "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/zuul-info/inventory.yaml",
                            "diff": [],
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "inventory.yaml",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "a27056f9bc83510c10e2adecf43ab02ad53f4aa5",
                                    "content": null,
                                    "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/zuul-info",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": true,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 420,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/.ansible/tmp/ansible-tmp-1772398991.67765-84-120415152043629/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "7b5fd65be8cba4da524e06989b53bf24",
                            "mode": "0644",
                            "owner": "root",
                            "size": 41631,
                            "src": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/.ansible/tmp/ansible-tmp-1772398991.67765-84-120415152043629/source",
                            "state": "file",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-00000000000d",
                        "name": "log-inventory",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/log-inventory"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:12.557250Z",
                            "start": "2026-03-01T21:03:11.650052Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000010",
                        "name": "Copy ansible inventory to logs dir"
                    }
                }
            ]
        },
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T21:03:20.020112Z",
                    "start": "2026-03-01T21:03:12.564561Z"
                },
                "id": "0242ac17-0011-7fd7-c28d-000000000011",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "stat",
                            "changed": false,
                            "failed_when_result": false,
                            "invocation": {
                                "module_args": {
                                    "checksum_algorithm": "sha1",
                                    "follow": false,
                                    "get_attributes": true,
                                    "get_checksum": true,
                                    "get_mime": true,
                                    "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa"
                                }
                            },
                            "stat": {
                                "exists": false
                            }
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:12.770045Z",
                            "start": "2026-03-01T21:03:12.573289Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000018",
                        "name": "Check to see if ssh key was already created for this build"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "include_tasks",
                            "changed": false,
                            "include": "create-key-and-replace.yaml",
                            "include_args": {}
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:12.809873Z",
                            "start": "2026-03-01T21:03:12.776087Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000019",
                        "name": "Create a new key in workspace based on build UUID"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "command",
                            "changed": true,
                            "cmd": [
                                "ssh-keygen",
                                "-t",
                                "rsa",
                                "-N",
                                "",
                                "-C",
                                "zuul-build-sshkey",
                                "-f",
                                "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa",
                                "-b",
                                "3072"
                            ],
                            "delta": "0:00:01.083370",
                            "end": "2026-03-01 21:03:14.265268",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "ssh-keygen -t rsa -N '' -C 'zuul-build-sshkey' -f /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa -b 3072",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-7fd7-c28d-00000000009c-0-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 21:03:13.181898",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Generating public/private rsa key pair.\nYour identification has been saved in /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa\nYour public key has been saved in /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa.pub\nThe key fingerprint is:\nSHA256:9ODf2CiOQTl4zrWnqdCkjNmRCTERt6YEPT804egTWyU zuul-build-sshkey\nThe key's randomart image is:\n+---[RSA 3072]----+\n|..=oE..          |\n| .o*o+           |\n|  =+=.  o        |\n| o Bo+ + o       |\n|  = =.* S .      |\n|   * X o o =     |\n|  o = = o = o    |\n|     . + =       |\n|      o.+        |\n+----[SHA256]-----+",
                            "stdout_lines": [
                                "Generating public/private rsa key pair.",
                                "Your identification has been saved in /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa",
                                "Your public key has been saved in /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa.pub",
                                "The key fingerprint is:",
                                "SHA256:9ODf2CiOQTl4zrWnqdCkjNmRCTERt6YEPT804egTWyU zuul-build-sshkey",
                                "The key's randomart image is:",
                                "+---[RSA 3072]----+",
                                "|..=oE..          |",
                                "| .o*o+           |",
                                "|  =+=.  o        |",
                                "| o Bo+ + o       |",
                                "|  = =.* S .      |",
                                "|   * X o o =     |",
                                "|  o = = o = o    |",
                                "|     . + =       |",
                                "|      o.+        |",
                                "+----[SHA256]-----+"
                            ],
                            "zuul_log_id": "0242ac17-0011-7fd7-c28d-00000000009c-0-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:14.306275Z",
                            "start": "2026-03-01T21:03:12.838215Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000009c",
                        "name": "Create Temp SSH key"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "include_tasks",
                            "changed": false,
                            "include": "remote-linux.yaml",
                            "include_args": {}
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:14.348817Z",
                            "start": "2026-03-01T21:03:14.312185Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000009d",
                        "name": "Remote setup ssh keys (linux)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "lineinfile",
                            "changed": false,
                            "false_condition": "zuul_build_sshkey_cleanup",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:14.413348Z",
                            "start": "2026-03-01T21:03:14.366792Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000bc",
                        "name": "Remove previously added zuul-build-sshkey"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "authorized_key",
                            "changed": true,
                            "comment": null,
                            "exclusive": false,
                            "follow": false,
                            "invocation": {
                                "module_args": {
                                    "changed": true,
                                    "comment": null,
                                    "exclusive": false,
                                    "follow": false,
                                    "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDPfwECylRYVh2wnK/+KlS5o86JBjkIQyEEYbI2SOozJMFOPh426hBYiUkfbWWE5thTsWfbU3n3LqHWL1qxx1aBSMkVyFVWoO8IlpkIHvNL/EKBIzI0BF7FQ1uNfOXz7CV17FqM34DkbVH0HLbT5rO+94slg28bFZhXcuezQG1XqqlqsVsChsYX/ovkqP7spOQ+S2Ne5CQq3i8xYdzSR4xA024sG2xW0oP0v1Chwfds0ZOecgSjEQalsrbQgWfRLb4SjAPc94AK5fIga5HwIcDKIvXuajxEqagCq7mlnsIbPfXY0NOCxB1FQ/Fr0Qun+dhr5Pa6VwcL0FNQ6COC0LTmlCfCwNP1ELOhyPXzumFXrGqrWPzKrGUMQ+nuPoPgKohkW0rN+YbrG2QeTzUqYfdkwS5dBNZmr10jZKI7WyFAf3dM8GWdDONDxWx4owIyqtBSrvObBZUzXkYqzY9XXBeXPdvbKKDIiFKVsAAnSWIVBrc0uCKNwjbXPZs931C4noc= zuul-build-sshkey",
                                    "key_options": null,
                                    "keyfile": "/home/zuul/.ssh/authorized_keys",
                                    "manage_dir": true,
                                    "path": null,
                                    "state": "present",
                                    "user": "zuul",
                                    "validate_certs": true
                                }
                            },
                            "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDPfwECylRYVh2wnK/+KlS5o86JBjkIQyEEYbI2SOozJMFOPh426hBYiUkfbWWE5thTsWfbU3n3LqHWL1qxx1aBSMkVyFVWoO8IlpkIHvNL/EKBIzI0BF7FQ1uNfOXz7CV17FqM34DkbVH0HLbT5rO+94slg28bFZhXcuezQG1XqqlqsVsChsYX/ovkqP7spOQ+S2Ne5CQq3i8xYdzSR4xA024sG2xW0oP0v1Chwfds0ZOecgSjEQalsrbQgWfRLb4SjAPc94AK5fIga5HwIcDKIvXuajxEqagCq7mlnsIbPfXY0NOCxB1FQ/Fr0Qun+dhr5Pa6VwcL0FNQ6COC0LTmlCfCwNP1ELOhyPXzumFXrGqrWPzKrGUMQ+nuPoPgKohkW0rN+YbrG2QeTzUqYfdkwS5dBNZmr10jZKI7WyFAf3dM8GWdDONDxWx4owIyqtBSrvObBZUzXkYqzY9XXBeXPdvbKKDIiFKVsAAnSWIVBrc0uCKNwjbXPZs931C4noc= zuul-build-sshkey",
                            "key_options": null,
                            "keyfile": "/home/zuul/.ssh/authorized_keys",
                            "manage_dir": true,
                            "path": null,
                            "state": "present",
                            "user": "zuul",
                            "validate_certs": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:14.890663Z",
                            "start": "2026-03-01T21:03:14.423079Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000bd",
                        "name": "Enable access via build key on all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": false,
                            "diff": {
                                "after": {
                                    "path": "/home/zuul/.ssh"
                                },
                                "before": {
                                    "path": "/home/zuul/.ssh"
                                }
                            },
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": 448,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/home/zuul/.ssh",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0700",
                            "owner": "zuul",
                            "path": "/home/zuul/.ssh",
                            "size": 4096,
                            "state": "directory",
                            "uid": 1000
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:15.071788Z",
                            "start": "2026-03-01T21:03:14.895647Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000be",
                        "name": "Make sure user has a .ssh"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "copy",
                            "changed": true,
                            "checksum": "09bb13637d773ba689639bab228a574092c0a5dc",
                            "dest": "/home/zuul/.ssh/id_rsa",
                            "diff": [],
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "5458773516a34c28bdbe6e02248c9f88_id_rsa",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "09bb13637d773ba689639bab228a574092c0a5dc",
                                    "content": null,
                                    "dest": "/home/zuul/.ssh/id_rsa",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": false,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 384,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/home/zuul/.ansible/tmp/ansible-tmp-1772398995.1181717-190-160006381103818/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "8e83131e76144f0e58dd3508899bed63",
                            "mode": "0600",
                            "owner": "zuul",
                            "size": 2602,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1772398995.1181717-190-160006381103818/source",
                            "state": "file",
                            "uid": 1000
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:15.539047Z",
                            "start": "2026-03-01T21:03:15.076676Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000bf",
                        "name": "Install build private key as SSH key on all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "copy",
                            "changed": true,
                            "checksum": "7f55725fcbf09d114a717b7362fb48739e327ba5",
                            "dest": "/home/zuul/.ssh/id_rsa.pub",
                            "diff": [],
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "5458773516a34c28bdbe6e02248c9f88_id_rsa.pub",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "7f55725fcbf09d114a717b7362fb48739e327ba5",
                                    "content": null,
                                    "dest": "/home/zuul/.ssh/id_rsa.pub",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": false,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 420,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/home/zuul/.ansible/tmp/ansible-tmp-1772398995.5945299-200-80790806495611/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "69f58589322e0fa7782176e1e97e27bf",
                            "mode": "0644",
                            "owner": "zuul",
                            "size": 571,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1772398995.5945299-200-80790806495611/source",
                            "state": "file",
                            "uid": 1000
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:16.025143Z",
                            "start": "2026-03-01T21:03:15.554667Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000c0",
                        "name": "Install build public key as SSH key on all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "include_tasks",
                            "changed": false,
                            "false_condition": "ansible_os_family == \"Windows\"",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:16.055036Z",
                            "start": "2026-03-01T21:03:16.030674Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000009e",
                        "name": "Remote setup ssh keys (windows)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "sshagent_remove_keys",
                            "changed": true,
                            "invocation": {
                                "module_args": {
                                    "remove": "^(?!\\(stdin\\)).*"
                                }
                            },
                            "removed": [
                                "/etc/zuul/id_rsa"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-0000000000a3",
                        "name": "remove-zuul-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/remove-zuul-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:16.407562Z",
                            "start": "2026-03-01T21:03:16.065492Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000a5",
                        "name": "Remove master key from local agent"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "command",
                            "changed": true,
                            "cmd": [
                                "ssh-add",
                                "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa"
                            ],
                            "delta": "0:00:00.014630",
                            "end": "2026-03-01 21:03:16.733477",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "ssh-add /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-7fd7-c28d-0000000000ab-0-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 21:03:16.718847",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Identity added: /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa (zuul-build-sshkey)",
                            "stdout_lines": [
                                "Identity added: /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/5458773516a34c28bdbe6e02248c9f88_id_rsa (zuul-build-sshkey)"
                            ],
                            "zuul_log_id": "0242ac17-0011-7fd7-c28d-0000000000ab-0-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:16.814656Z",
                            "start": "2026-03-01T21:03:16.457434Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000ab",
                        "name": "Add back temp key"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ping",
                            "changed": false,
                            "invocation": {
                                "module_args": {
                                    "data": "pong"
                                }
                            },
                            "ping": "pong"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:17.093041Z",
                            "start": "2026-03-01T21:03:16.820326Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000ac",
                        "name": "Verify we can still SSH to all nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "command",
                            "changed": false,
                            "false_condition": "ansible_os_family == \"Windows\"",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000016",
                        "name": "add-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/add-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:17.126303Z",
                            "start": "2026-03-01T21:03:17.098971Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-0000000000ad",
                        "name": "Verify we can still SSH to all nodes (windows)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "zuul_console",
                            "changed": false,
                            "invocation": {
                                "module_args": {
                                    "path": "/tmp/console-{log_uuid}.log",
                                    "port": 19885,
                                    "state": "present"
                                }
                            }
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-00000000001b",
                        "name": "prepare-workspace",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/prepare-workspace"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:17.454131Z",
                            "start": "2026-03-01T21:03:17.156693Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000001d",
                        "name": "Start zuul_console daemon."
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
                            "changed": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-00000000001b",
                        "name": "prepare-workspace",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/prepare-workspace"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:18.981631Z",
                            "start": "2026-03-01T21:03:17.465359Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-00000000001e",
                        "name": "Synchronize src repos to workspace directory."
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": false,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/logs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "absent",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "path": "/home/zuul/zuul-output/logs",
                                    "state": "absent",
                                    "zj_output_dir": "logs"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "absent",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "path": "/home/zuul/zuul-output/artifacts",
                                    "state": "absent",
                                    "zj_output_dir": "artifacts"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/docs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "absent",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "path": "/home/zuul/zuul-output/docs",
                                    "state": "absent",
                                    "zj_output_dir": "docs"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000020",
                        "name": "ensure-output-dirs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/ensure-output-dirs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:19.506461Z",
                            "start": "2026-03-01T21:03:18.990572Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000022",
                        "name": "Empty Zuul Output directories by removing them"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "mode": "0755",
                                            "path": "/home/zuul/zuul-output/logs",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "mode": "0775",
                                            "path": "/home/zuul/zuul-output/logs",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 1000,
                                    "group": "zuul",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/logs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "zuul",
                                    "path": "/home/zuul/zuul-output/logs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 1000,
                                    "zj_output_dir": "logs"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "mode": "0755",
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "mode": "0775",
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 1000,
                                    "group": "zuul",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/artifacts",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "zuul",
                                    "path": "/home/zuul/zuul-output/artifacts",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 1000,
                                    "zj_output_dir": "artifacts"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "mode": "0755",
                                            "path": "/home/zuul/zuul-output/docs",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "mode": "0775",
                                            "path": "/home/zuul/zuul-output/docs",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 1000,
                                    "group": "zuul",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/home/zuul/zuul-output/docs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "zuul",
                                    "path": "/home/zuul/zuul-output/docs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 1000,
                                    "zj_output_dir": "docs"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-7fd7-c28d-000000000020",
                        "name": "ensure-output-dirs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/ensure-output-dirs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:20.020112Z",
                            "start": "2026-03-01T21:03:19.516420Z"
                        },
                        "id": "0242ac17-0011-7fd7-c28d-000000000024",
                        "name": "Ensure Zuul Output directories exist"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 8,
            "failures": 0,
            "ignored": 0,
            "ok": 15,
            "rescued": 0,
            "skipped": 3,
            "unreachable": 0
        },
        "localhost": {
            "changed": 2,
            "failures": 0,
            "ignored": 0,
            "ok": 6,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": true
},
{
    "branch": "main",
    "index": "1",
    "phase": "pre",
    "playbook": "github.com/vexxhost/zuul-jobs/playbooks/molecule/pre.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T21:03:23.468961Z",
                    "start": "2026-03-01T21:03:20.778861Z"
                },
                "id": "0242ac17-0011-2f83-1ece-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.unarchive",
                            "changed": true,
                            "dest": "/usr/local/bin",
                            "extract_results": {
                                "cmd": [
                                    "/usr/bin/tar",
                                    "--extract",
                                    "-C",
                                    "/usr/local/bin",
                                    "-z",
                                    "--show-transformed-names",
                                    "--strip-components=1",
                                    "-f",
                                    "/home/zuul/.ansible/tmp/ansible-tmp-1772399000.8364556-5-21225464856567/uv-x86_64-unknown-linux-gnu0x3cxj8m.tar.gz"
                                ],
                                "err": "",
                                "out": "",
                                "rc": 0
                            },
                            "gid": 0,
                            "group": "root",
                            "handler": "TgzArchive",
                            "invocation": {
                                "module_args": {
                                    "attributes": null,
                                    "copy": true,
                                    "creates": "/usr/local/bin/uv",
                                    "decrypt": true,
                                    "dest": "/usr/local/bin",
                                    "exclude": [],
                                    "extra_opts": [
                                        "--strip-components=1"
                                    ],
                                    "group": null,
                                    "include": [],
                                    "io_buffer_size": 65536,
                                    "keep_newer": false,
                                    "list_files": false,
                                    "mode": null,
                                    "owner": null,
                                    "remote_src": true,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "https://github.com/astral-sh/uv/releases/download/0.8.13/uv-x86_64-unknown-linux-gnu.tar.gz",
                                    "unsafe_writes": false,
                                    "validate_certs": true
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "size": 4096,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1772399000.8364556-5-21225464856567/uv-x86_64-unknown-linux-gnu0x3cxj8m.tar.gz",
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-2f83-1ece-000000000005",
                        "name": "setup-uv",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_0/github.com/vexxhost/zuul-jobs/roles/setup-uv"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:22.927670Z",
                            "start": "2026-03-01T21:03:20.792838Z"
                        },
                        "id": "0242ac17-0011-2f83-1ece-000000000007",
                        "name": "Extract archive"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.command",
                            "changed": true,
                            "cmd": [
                                "/usr/local/bin/uv",
                                "--version"
                            ],
                            "delta": "0:00:00.011672",
                            "end": "2026-03-01 21:03:23.250162",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "/usr/local/bin/uv --version",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-2f83-1ece-000000000008-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 21:03:23.238490",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "uv 0.8.13",
                            "stdout_lines": [
                                "uv 0.8.13"
                            ],
                            "zuul_log_id": "0242ac17-0011-2f83-1ece-000000000008-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-2f83-1ece-000000000005",
                        "name": "setup-uv",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_0/github.com/vexxhost/zuul-jobs/roles/setup-uv"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:23.468961Z",
                            "start": "2026-03-01T21:03:22.952179Z"
                        },
                        "id": "0242ac17-0011-2f83-1ece-000000000008",
                        "name": "Print version"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 2,
            "failures": 0,
            "ignored": 0,
            "ok": 2,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "stable/2023.2",
    "index": "2",
    "phase": "pre",
    "playbook": "github.com/vexxhost/atmosphere/test-playbooks/molecule/pre.yml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T21:03:32.820794Z",
                    "start": "2026-03-01T21:03:24.222105Z"
                },
                "id": "0242ac17-0011-d5ba-40af-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.apt",
                            "cache_update_time": 1772399008,
                            "cache_updated": true,
                            "changed": true,
                            "diff": {},
                            "invocation": {
                                "module_args": {
                                    "allow_change_held_packages": false,
                                    "allow_downgrade": false,
                                    "allow_unauthenticated": false,
                                    "autoclean": false,
                                    "autoremove": false,
                                    "cache_valid_time": 0,
                                    "clean": false,
                                    "deb": null,
                                    "default_release": null,
                                    "dpkg_options": "force-confdef,force-confold",
                                    "fail_on_autoremove": false,
                                    "force": false,
                                    "force_apt_get": false,
                                    "install_recommends": null,
                                    "lock_timeout": 60,
                                    "name": "jq",
                                    "only_upgrade": false,
                                    "package": [
                                        "jq"
                                    ],
                                    "policy_rc_d": null,
                                    "purge": false,
                                    "state": "present",
                                    "update_cache": true,
                                    "update_cache_retries": 5,
                                    "update_cache_retry_max_delay": 12,
                                    "upgrade": null
                                }
                            },
                            "stderr": "debconf: delaying package configuration, since apt-utils is not installed\n",
                            "stderr_lines": [
                                "debconf: delaying package configuration, since apt-utils is not installed"
                            ],
                            "stdout": "Reading package lists...\nBuilding dependency tree...\nReading state information...\nThe following additional packages will be installed:\n  libjq1 libonig5\nThe following NEW packages will be installed:\n  jq libjq1 libonig5\n0 upgraded, 3 newly installed, 0 to remove and 2 not upgraded.\nNeed to get 357 kB of archives.\nAfter this operation, 1087 kB of additional disk space will be used.\nGet:1 http://nova.clouds.archive.ubuntu.com/ubuntu jammy/main amd64 libonig5 amd64 6.9.7.1-2build1 [172 kB]\nGet:2 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libjq1 amd64 1.6-2.1ubuntu3.1 [133 kB]\nGet:3 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 jq amd64 1.6-2.1ubuntu3.1 [52.5 kB]\nFetched 357 kB in 0s (840 kB/s)\nSelecting previously unselected package libonig5:amd64.\r\n(Reading database ... \r(Reading database ... 5%\r(Reading database ... 10%\r(Reading database ... 15%\r(Reading database ... 20%\r(Reading database ... 25%\r(Reading database ... 30%\r(Reading database ... 35%\r(Reading database ... 40%\r(Reading database ... 45%\r(Reading database ... 50%\r(Reading database ... 55%\r(Reading database ... 60%\r(Reading database ... 65%\r(Reading database ... 70%\r(Reading database ... 75%\r(Reading database ... 80%\r(Reading database ... 85%\r(Reading database ... 90%\r(Reading database ... 95%\r(Reading database ... 100%\r(Reading database ... 
30907 files and directories currently installed.)\r\nPreparing to unpack .../libonig5_6.9.7.1-2build1_amd64.deb ...\r\nUnpacking libonig5:amd64 (6.9.7.1-2build1) ...\r\nSelecting previously unselected package libjq1:amd64.\r\nPreparing to unpack .../libjq1_1.6-2.1ubuntu3.1_amd64.deb ...\r\nUnpacking libjq1:amd64 (1.6-2.1ubuntu3.1) ...\r\nSelecting previously unselected package jq.\r\nPreparing to unpack .../jq_1.6-2.1ubuntu3.1_amd64.deb ...\r\nUnpacking jq (1.6-2.1ubuntu3.1) ...\r\nSetting up libonig5:amd64 (6.9.7.1-2build1) ...\r\nSetting up libjq1:amd64 (1.6-2.1ubuntu3.1) ...\r\nSetting up jq (1.6-2.1ubuntu3.1) ...\r\nProcessing triggers for libc-bin (2.35-0ubuntu3.13) ...\r\n",
                            "stdout_lines": [
                                "Reading package lists...",
                                "Building dependency tree...",
                                "Reading state information...",
                                "The following additional packages will be installed:",
                                "  libjq1 libonig5",
                                "The following NEW packages will be installed:",
                                "  jq libjq1 libonig5",
                                "0 upgraded, 3 newly installed, 0 to remove and 2 not upgraded.",
                                "Need to get 357 kB of archives.",
                                "After this operation, 1087 kB of additional disk space will be used.",
                                "Get:1 http://nova.clouds.archive.ubuntu.com/ubuntu jammy/main amd64 libonig5 amd64 6.9.7.1-2build1 [172 kB]",
                                "Get:2 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 libjq1 amd64 1.6-2.1ubuntu3.1 [133 kB]",
                                "Get:3 http://nova.clouds.archive.ubuntu.com/ubuntu jammy-updates/main amd64 jq amd64 1.6-2.1ubuntu3.1 [52.5 kB]",
                                "Fetched 357 kB in 0s (840 kB/s)",
                                "Selecting previously unselected package libonig5:amd64.",
                                "(Reading database ... ",
                                "(Reading database ... 5%",
                                "(Reading database ... 10%",
                                "(Reading database ... 15%",
                                "(Reading database ... 20%",
                                "(Reading database ... 25%",
                                "(Reading database ... 30%",
                                "(Reading database ... 35%",
                                "(Reading database ... 40%",
                                "(Reading database ... 45%",
                                "(Reading database ... 50%",
                                "(Reading database ... 55%",
                                "(Reading database ... 60%",
                                "(Reading database ... 65%",
                                "(Reading database ... 70%",
                                "(Reading database ... 75%",
                                "(Reading database ... 80%",
                                "(Reading database ... 85%",
                                "(Reading database ... 90%",
                                "(Reading database ... 95%",
                                "(Reading database ... 100%",
                                "(Reading database ... 30907 files and directories currently installed.)",
                                "Preparing to unpack .../libonig5_6.9.7.1-2build1_amd64.deb ...",
                                "Unpacking libonig5:amd64 (6.9.7.1-2build1) ...",
                                "Selecting previously unselected package libjq1:amd64.",
                                "Preparing to unpack .../libjq1_1.6-2.1ubuntu3.1_amd64.deb ...",
                                "Unpacking libjq1:amd64 (1.6-2.1ubuntu3.1) ...",
                                "Selecting previously unselected package jq.",
                                "Preparing to unpack .../jq_1.6-2.1ubuntu3.1_amd64.deb ...",
                                "Unpacking jq (1.6-2.1ubuntu3.1) ...",
                                "Setting up libonig5:amd64 (6.9.7.1-2build1) ...",
                                "Setting up libjq1:amd64 (1.6-2.1ubuntu3.1) ...",
                                "Setting up jq (1.6-2.1ubuntu3.1) ...",
                                "Processing triggers for libc-bin (2.35-0ubuntu3.13) ..."
                            ]
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:32.820794Z",
                            "start": "2026-03-01T21:03:24.233597Z"
                        },
                        "id": "0242ac17-0011-d5ba-40af-000000000004",
                        "name": "Install \"jq\" for log collection"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 1,
            "failures": 0,
            "ignored": 0,
            "ok": 1,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "main",
    "index": "0",
    "phase": "run",
    "playbook": "github.com/vexxhost/zuul-jobs/playbooks/molecule/run.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T22:25:01.432279Z",
                    "start": "2026-03-01T21:03:33.566837Z"
                },
                "id": "0242ac17-0011-7e84-f0f8-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.copy",
                            "changed": true,
                            "checksum": "a27056f9bc83510c10e2adecf43ab02ad53f4aa5",
                            "dest": "src/github.com/vexxhost/atmosphere/inventory.yaml",
                            "diff": [],
                            "gid": 1000,
                            "group": "zuul",
                            "invocation": {
                                "module_args": {
                                    "_original_basename": "inventory.yaml",
                                    "attributes": null,
                                    "backup": false,
                                    "checksum": "a27056f9bc83510c10e2adecf43ab02ad53f4aa5",
                                    "content": null,
                                    "dest": "src/github.com/vexxhost/atmosphere",
                                    "directory_mode": null,
                                    "follow": false,
                                    "force": true,
                                    "group": null,
                                    "local_follow": null,
                                    "mode": 420,
                                    "owner": null,
                                    "remote_src": null,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": "/home/zuul/.ansible/tmp/ansible-tmp-1772399013.6350586-5-234608183637782/source",
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "md5sum": "7b5fd65be8cba4da524e06989b53bf24",
                            "mode": "0644",
                            "owner": "zuul",
                            "size": 41631,
                            "src": "/home/zuul/.ansible/tmp/ansible-tmp-1772399013.6350586-5-234608183637782/source",
                            "state": "file",
                            "uid": 1000
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:34.434332Z",
                            "start": "2026-03-01T21:03:33.578661Z"
                        },
                        "id": "0242ac17-0011-7e84-f0f8-000000000004",
                        "name": "Copy inventory file for Zuul"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.replace",
                            "changed": true,
                            "invocation": {
                                "module_args": {
                                    "after": null,
                                    "attributes": null,
                                    "backup": false,
                                    "before": null,
                                    "encoding": "utf-8",
                                    "group": null,
                                    "mode": null,
                                    "owner": null,
                                    "path": "src/github.com/vexxhost/atmosphere/inventory.yaml",
                                    "regexp": "(^\\s*ansible_host:\\s*).*$",
                                    "replace": "\\1\"{{ nodepool.private_ipv4 }}\"",
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "unsafe_writes": false,
                                    "validate": null
                                }
                            },
                            "msg": "1 replacements made",
                            "rc": 0
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T21:03:34.798482Z",
                            "start": "2026-03-01T21:03:34.447326Z"
                        },
                        "id": "0242ac17-0011-7e84-f0f8-000000000005",
                        "name": "Switch \"ansible_host\" to private IP"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "ansible.builtin.command",
                            "changed": true,
                            "cmd": [
                                "uv",
                                "run",
                                "molecule",
                                "test",
                                "--destroy",
                                "never",
                                "-s",
                                "aio"
                            ],
                            "delta": "1:21:26.088538",
                            "end": "2026-03-01 22:25:01.223449",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "uv run molecule test --destroy never -s aio",
                                    "_uses_shell": false,
                                    "argv": null,
                                    "chdir": "src/github.com/vexxhost/atmosphere",
                                    "creates": null,
                                    "executable": null,
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-7e84-f0f8-000000000006-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 21:03:35.134911",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Using CPython 3.10.12 interpreter at: /usr/bin/python3\nCreating virtual environment at: .venv\n   Building atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere\nDownloading pygments (1.2MiB)\nDownloading setuptools (1.1MiB)\nDownloading rjsonnet (1.2MiB)\nDownloading ansible-core (2.1MiB)\nDownloading netaddr (2.2MiB)\nDownloading cryptography (4.2MiB)\nDownloading openstacksdk (1.7MiB)\nDownloading kubernetes (1.9MiB)\n   Building pyperclip==1.9.0\n Downloading rjsonnet\n Downloading pygments\n Downloading netaddr\n Downloading cryptography\n Downloading setuptools\n Downloading kubernetes\n Downloading ansible-core\n Downloading openstacksdk\n      Built pyperclip==1.9.0\n      Built atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere\nInstalled 79 packages in 37ms\nWARNING  Molecule scenarios should migrate to 'extensions/molecule'\nINFO     [aio > discovery] scenario test matrix: dependency, cleanup, destroy, syntax, create, prepare, converge, idempotence, side_effect, verify, cleanup, destroy\nINFO     [aio > prerun] Performing prerun with role_name_check=0...\nINFO     [aio > dependency] Executing\nWARNING  [aio > dependency] Missing roles requirements file: requirements.yml\nWARNING  [aio > dependency] Missing collections requirements file: collections.yml\nWARNING  [aio > dependency] Executed: 2 missing (Remove from test_sequence to suppress)\nINFO     [aio > cleanup] Executing\nWARNING  [aio > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)\nINFO     [aio > destroy] Executing\nWARNING  [aio > destroy] Skipping, '--destroy=never' requested.\nINFO     [aio > destroy] Executed: Successful\nINFO     [aio > syntax] Executing\n\nplaybook: /home/zuul/src/github.com/vexxhost/atmosphere/molecule/aio/converge.yml\nINFO     [aio > syntax] Executed: Successful\nINFO     [aio > create] Executing\nWARNING  [aio > create] Executed: Missing playbook (Remove from test_sequence to 
suppress)\nINFO     [aio > prepare] Executing\n\nPLAY [Prepare] *****************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:04:21 +0000 (0:00:00.026)       0:00:00.026 **********\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [Configure short hostname] ************************************************\nSunday 01 March 2026  21:04:22 +0000 (0:00:01.164)       0:00:01.191 **********\nchanged: [instance]\n\nTASK [Ensure hostname inside hosts file] ***************************************\nSunday 01 March 2026  21:04:23 +0000 (0:00:00.680)       0:00:01.871 **********\n[WARNING]: Module remote_tmp /root/.ansible/tmp did not exist and was created\nwith a mode of 0700, this may cause issues when running as another user. 
To\navoid this, create the remote_tmp dir with the correct permissions manually\nchanged: [instance]\n\nTASK [Install \"dirmngr\" for GPG keyserver operations] **************************\nSunday 01 March 2026  21:04:23 +0000 (0:00:00.264)       0:00:02.136 **********\nok: [instance]\n\nTASK [Purge \"snapd\" package] ***************************************************\nSunday 01 March 2026  21:04:24 +0000 (0:00:01.123)       0:00:03.259 **********\nok: [instance]\n\nPLAY [Generate workspace for Atmosphere] ***************************************\n\nTASK [Create folders for workspace] ********************************************\nSunday 01 March 2026  21:04:25 +0000 (0:00:00.686)       0:00:03.946 **********\nchanged: [localhost] => (item=group_vars)\nchanged: [localhost] => (item=group_vars/all)\nchanged: [localhost] => (item=group_vars/controllers)\nchanged: [localhost] => (item=group_vars/cephs)\nchanged: [localhost] => (item=group_vars/computes)\nchanged: [localhost] => (item=host_vars)\n\nPLAY [Generate Ceph control plane configuration for workspace] *****************\n\nTASK [Ensure the Ceph control plane configuration file exists] *****************\nSunday 01 March 2026  21:04:26 +0000 (0:00:01.010)       0:00:04.956 **********\nchanged: [localhost]\n\nTASK [Load the current Ceph control plane configuration into a variable] *******\nSunday 01 March 2026  21:04:26 +0000 (0:00:00.182)       0:00:05.139 **********\nok: [localhost]\n\nTASK [Generate Ceph control plane values for missing variables] ****************\nSunday 01 March 2026  21:04:26 +0000 (0:00:00.024)       0:00:05.163 **********\nok: [localhost] => (item={'key': 'ceph_fsid', 'value': '7aabea57-4b0a-554c-aac8-9f3c15dba566'})\nok: [localhost] => (item={'key': 'ceph_mon_public_network', 'value': '10.96.240.0/24'})\n\nTASK [Write new Ceph control plane configuration file to disk] *****************\nSunday 01 March 2026  21:04:26 +0000 (0:00:00.046)       0:00:05.210 **********\nchanged: 
[localhost]\n\nPLAY [Generate Ceph OSD configuration for workspace] ***************************\n\nTASK [Ensure the Ceph OSDs configuration file exists] **************************\nSunday 01 March 2026  21:04:27 +0000 (0:00:00.521)       0:00:05.732 **********\nchanged: [localhost]\n\nTASK [Load the current Ceph OSDs configuration into a variable] ****************\nSunday 01 March 2026  21:04:27 +0000 (0:00:00.165)       0:00:05.897 **********\nok: [localhost]\n\nTASK [Generate Ceph OSDs values for missing variables] *************************\nSunday 01 March 2026  21:04:27 +0000 (0:00:00.028)       0:00:05.925 **********\nok: [localhost] => (item={'key': 'ceph_osd_devices', 'value': ['/dev/vdb', '/dev/vdc', '/dev/vdd']})\n\nTASK [Write new Ceph OSDs configuration file to disk] **************************\nSunday 01 March 2026  21:04:27 +0000 (0:00:00.031)       0:00:05.957 **********\nchanged: [localhost]\n\nPLAY [Generate Kubernetes configuration for workspace] *************************\n\nTASK [Ensure the Kubernetes configuration file exists] *************************\nSunday 01 March 2026  21:04:27 +0000 (0:00:00.350)       0:00:06.307 **********\nchanged: [localhost]\n\nTASK [Load the current Kubernetes configuration into a variable] ***************\nSunday 01 March 2026  21:04:27 +0000 (0:00:00.184)       0:00:06.491 **********\nok: [localhost]\n\nTASK [Generate Kubernetes values for missing variables] ************************\nSunday 01 March 2026  21:04:27 +0000 (0:00:00.025)       0:00:06.517 **********\nok: [localhost] => (item={'key': 'kubernetes_hostname', 'value': '10.96.240.10'})\nok: [localhost] => (item={'key': 'kubernetes_keepalived_vrid', 'value': 42})\nok: [localhost] => (item={'key': 'kubernetes_keepalived_vip', 'value': '10.96.240.10'})\n\nTASK [Write new Kubernetes configuration file to disk] *************************\nSunday 01 March 2026  21:04:28 +0000 (0:00:00.039)       0:00:06.557 **********\nchanged: [localhost]\n\nPLAY [Generate 
Keepalived configuration for workspace] *************************\n\nTASK [Ensure the Keeaplived configuration file exists] *************************\nSunday 01 March 2026  21:04:28 +0000 (0:00:00.363)       0:00:06.920 **********\nchanged: [localhost]\n\nTASK [Load the current Keepalived configuration into a variable] ***************\nSunday 01 March 2026  21:04:28 +0000 (0:00:00.167)       0:00:07.087 **********\nok: [localhost]\n\nTASK [Generate Keepalived values for missing variables] ************************\nSunday 01 March 2026  21:04:28 +0000 (0:00:00.025)       0:00:07.113 **********\nok: [localhost] => (item={'key': 'keepalived_interface', 'value': 'br-ex'})\nok: [localhost] => (item={'key': 'keepalived_vip', 'value': '10.96.250.10'})\n\nTASK [Write new Keepalived configuration file to disk] *************************\nSunday 01 March 2026  21:04:28 +0000 (0:00:00.034)       0:00:07.147 **********\nchanged: [localhost]\n\nPLAY [Generate endpoints for workspace] ****************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:04:28 +0000 (0:00:00.353)       0:00:07.501 **********\nok: [localhost]\n\nTASK [Ensure the endpoints file exists] ****************************************\nSunday 01 March 2026  21:04:29 +0000 (0:00:00.680)       0:00:08.182 **********\nchanged: [localhost]\n\nTASK [Load the current endpoints into a variable] ******************************\nSunday 01 March 2026  21:04:29 +0000 (0:00:00.169)       0:00:08.352 **********\nok: [localhost]\n\nTASK [Generate endpoint skeleton for missing variables] ************************\nSunday 01 March 2026  21:04:29 +0000 (0:00:00.027)       0:00:08.379 **********\nok: [localhost] => (item=keycloak_host)\nok: [localhost] => (item=kube_prometheus_stack_grafana_host)\nok: [localhost] => (item=kube_prometheus_stack_alertmanager_host)\nok: [localhost] => (item=kube_prometheus_stack_prometheus_host)\nok: [localhost] => 
(item=openstack_helm_endpoints_region_name)\nok: [localhost] => (item=openstack_helm_endpoints_keystone_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_glance_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_cinder_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_placement_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_barbican_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_neutron_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_nova_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_nova_novnc_host)\nok: [localhost] => (item=openstack_helm_endpoints_ironic_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_designate_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_octavia_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_magnum_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_magnum_registry_host)\nok: [localhost] => (item=openstack_helm_endpoints_heat_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_heat_cfn_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_horizon_api_host)\nok: [localhost] => (item=openstack_helm_endpoints_rgw_host)\nok: [localhost] => (item=openstack_helm_endpoints_manila_api_host)\n\nTASK [Write new endpoints file to disk] ****************************************\nSunday 01 March 2026  21:04:30 +0000 (0:00:00.786)       0:00:09.165 **********\nchanged: [localhost]\n\nTASK [Ensure the endpoints file exists] ****************************************\nSunday 01 March 2026  21:04:30 +0000 (0:00:00.349)       0:00:09.515 **********\nchanged: [localhost]\n\nPLAY [Generate Neutron configuration for workspace] ****************************\n\nTASK [Ensure the Neutron configuration file exists] ****************************\nSunday 01 March 2026  21:04:31 +0000 (0:00:00.170)       0:00:09.685 **********\nchanged: [localhost]\n\nTASK [Load the current Neutron configuration into a variable] 
******************\nSunday 01 March 2026  21:04:31 +0000 (0:00:00.185)       0:00:09.870 **********\nok: [localhost]\n\nTASK [Generate Neutron values for missing variables] ***************************\nSunday 01 March 2026  21:04:31 +0000 (0:00:00.028)       0:00:09.899 **********\nok: [localhost] => (item={'key': 'neutron_networks', 'value': [{'name': 'public', 'external': True, 'shared': True, 'mtu_size': 1500, 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'subnets': [{'name': 'public-subnet', 'cidr': '10.96.250.0/24', 'gateway_ip': '10.96.250.10', 'allocation_pool_start': '10.96.250.200', 'allocation_pool_end': '10.96.250.220', 'enable_dhcp': True}]}]})\n\nTASK [Write new Neutron configuration file to disk] ****************************\nSunday 01 March 2026  21:04:31 +0000 (0:00:00.039)       0:00:09.938 **********\nchanged: [localhost]\n\nPLAY [Generate Nova configuration for workspace] *******************************\n\nTASK [Ensure the Nova configuration file exists] *******************************\nSunday 01 March 2026  21:04:31 +0000 (0:00:00.339)       0:00:10.277 **********\nchanged: [localhost]\n\nTASK [Load the current Nova configuration into a variable] *********************\nSunday 01 March 2026  21:04:31 +0000 (0:00:00.170)       0:00:10.447 **********\nok: [localhost]\n\nTASK [Generate Nova values for missing variables] ******************************\nSunday 01 March 2026  21:04:31 +0000 (0:00:00.033)       0:00:10.480 **********\nok: [localhost] => (item={'key': 'nova_flavors', 'value': [{'name': 'm1.tiny', 'ram': 512, 'disk': 1, 'vcpus': 1}, {'name': 'm1.small', 'ram': 2048, 'disk': 20, 'vcpus': 1}, {'name': 'm1.medium', 'ram': 4096, 'disk': 40, 'vcpus': 2}, {'name': 'm1.large', 'ram': 8192, 'disk': 80, 'vcpus': 4}, {'name': 'm1.xlarge', 'ram': 16384, 'disk': 160, 'vcpus': 8}]})\n\nTASK [Write new Nova configuration file to disk] *******************************\nSunday 01 March 2026  
21:04:31 +0000 (0:00:00.041)       0:00:10.522 **********\nchanged: [localhost]\n\nPLAY [Generate secrets for workspace] ******************************************\n\nTASK [Ensure the secrets file exists] ******************************************\nSunday 01 March 2026  21:04:32 +0000 (0:00:00.352)       0:00:10.875 **********\nchanged: [localhost]\n\nTASK [Load the current secrets into a variable] ********************************\nSunday 01 March 2026  21:04:32 +0000 (0:00:00.172)       0:00:11.047 **********\nok: [localhost]\n\nTASK [Generate secrets for missing variables] **********************************\nSunday 01 March 2026  21:04:32 +0000 (0:00:00.031)       0:00:11.079 **********\nok: [localhost] => (item=heat_auth_encryption_key)\nok: [localhost] => (item=keepalived_password)\nok: [localhost] => (item=keycloak_admin_password)\nok: [localhost] => (item=keycloak_database_password)\nok: [localhost] => (item=keystone_keycloak_client_secret)\nok: [localhost] => (item=keystone_oidc_crypto_passphrase)\nok: [localhost] => (item=kube_prometheus_stack_grafana_admin_password)\nok: [localhost] => (item=octavia_heartbeat_key)\nok: [localhost] => (item=openstack_helm_endpoints_rabbitmq_admin_password)\nok: [localhost] => (item=openstack_helm_endpoints_memcached_secret_key)\nok: [localhost] => (item=openstack_helm_endpoints_keystone_admin_password)\nok: [localhost] => (item=openstack_helm_endpoints_keystone_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_keystone_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_glance_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_glance_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_glance_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_cinder_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_cinder_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_cinder_rabbitmq_password)\nok: [localhost] 
=> (item=openstack_helm_endpoints_placement_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_placement_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_barbican_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_barbican_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_neutron_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_neutron_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_neutron_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_neutron_metadata_secret)\nok: [localhost] => (item=openstack_helm_endpoints_nova_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_nova_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_nova_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_ironic_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_ironic_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_ironic_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_designate_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_designate_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_designate_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_octavia_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_octavia_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_octavia_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_magnum_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_magnum_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_magnum_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_heat_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_heat_trustee_keystone_password)\nok: [localhost] => 
(item=openstack_helm_endpoints_heat_stack_user_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_heat_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_heat_rabbitmq_password)\nok: [localhost] => (item=openstack_helm_endpoints_horizon_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_tempest_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_openstack_exporter_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_rgw_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_manila_keystone_password)\nok: [localhost] => (item=openstack_helm_endpoints_manila_mariadb_password)\nok: [localhost] => (item=openstack_helm_endpoints_staffeln_mariadb_password)\n\nTASK [Generate base64 encoded secrets] *****************************************\nSunday 01 March 2026  21:04:32 +0000 (0:00:00.401)       0:00:11.480 **********\nok: [localhost] => (item=barbican_kek)\n\nTASK [Generate temporary files for generating keys for missing variables] ******\nSunday 01 March 2026  21:04:32 +0000 (0:00:00.057)       0:00:11.537 **********\nchanged: [localhost] => (item=manila_ssh_key)\nchanged: [localhost] => (item=nova_ssh_key)\n\nTASK [Generate SSH keys for missing variables] *********************************\nSunday 01 March 2026  21:04:33 +0000 (0:00:00.387)       0:00:11.924 **********\nchanged: [localhost] => (item=manila_ssh_key)\nchanged: [localhost] => (item=nova_ssh_key)\n\nTASK [Set values for SSH keys] *************************************************\nSunday 01 March 2026  21:04:36 +0000 (0:00:03.221)       0:00:15.146 **********\nok: [localhost] => (item=manila_ssh_key)\nok: [localhost] => (item=nova_ssh_key)\n\nTASK [Delete the temporary files generated for SSH keys] ***********************\nSunday 01 March 2026  21:04:36 +0000 (0:00:00.052)       0:00:15.199 **********\nchanged: [localhost] => (item=manila_ssh_key)\nchanged: [localhost] => (item=nova_ssh_key)\n\nTASK 
[Write new secrets file to disk] ******************************************\nSunday 01 March 2026  21:04:37 +0000 (0:00:00.352)       0:00:15.552 **********\nchanged: [localhost]\n\nTASK [Encrypt secrets file with Vault password] ********************************\nSunday 01 March 2026  21:04:37 +0000 (0:00:00.346)       0:00:15.898 **********\nskipping: [localhost]\n\nPLAY [Setup networking] ********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:04:37 +0000 (0:00:00.037)       0:00:15.936 **********\nok: [instance]\n\nTASK [Create bridge for management network] ************************************\nSunday 01 March 2026  21:04:38 +0000 (0:00:00.693)       0:00:16.629 **********\nok: [instance]\n\nTASK [Create fake interface for management bridge] *****************************\nSunday 01 March 2026  21:04:38 +0000 (0:00:00.314)       0:00:16.943 **********\nok: [instance]\n\nTASK [Assign dummy interface to management bridge] *****************************\nSunday 01 March 2026  21:04:38 +0000 (0:00:00.203)       0:00:17.147 **********\nok: [instance]\n\nTASK [Assign IP address for management bridge] *********************************\nSunday 01 March 2026  21:04:38 +0000 (0:00:00.196)       0:00:17.343 **********\nok: [instance]\n\nTASK [Bring up interfaces] *****************************************************\nSunday 01 March 2026  21:04:39 +0000 (0:00:00.210)       0:00:17.554 **********\nok: [instance] => (item=br-mgmt)\nok: [instance] => (item=dummy0)\n\nPLAY [Create devices for Ceph] *************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:04:39 +0000 (0:00:00.382)       0:00:17.937 **********\nok: [instance]\n\nTASK [Install depedencies] *****************************************************\nSunday 01 March 2026  21:04:40 +0000 (0:00:00.745)       
0:00:18.682 **********\nchanged: [instance]\n\nTASK [Start up service] ********************************************************\nSunday 01 March 2026  21:04:59 +0000 (0:00:19.624)       0:00:38.307 **********\nok: [instance]\n\nTASK [Generate lvm.conf] *******************************************************\nSunday 01 March 2026  21:05:00 +0000 (0:00:00.518)       0:00:38.826 **********\nok: [instance]\n\nTASK [Write /etc/lvm/lvm.conf] *************************************************\nSunday 01 March 2026  21:05:00 +0000 (0:00:00.220)       0:00:39.046 **********\nchanged: [instance]\n\nTASK [Get list of all loopback devices] ****************************************\nSunday 01 March 2026  21:05:00 +0000 (0:00:00.446)       0:00:39.493 **********\nok: [instance]\n\nTASK [Fail if there is any existing loopback devices] **************************\nSunday 01 March 2026  21:05:01 +0000 (0:00:00.198)       0:00:39.691 **********\nskipping: [instance]\n\nTASK [Create devices for Ceph] *************************************************\nSunday 01 March 2026  21:05:01 +0000 (0:00:00.030)       0:00:39.722 **********\nchanged: [instance] => (item=osd0)\nchanged: [instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Set permissions on loopback devices] *************************************\nSunday 01 March 2026  21:05:01 +0000 (0:00:00.509)       0:00:40.232 **********\nchanged: [instance] => (item=osd0)\nchanged: [instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Start loop devices] ******************************************************\nSunday 01 March 2026  21:05:02 +0000 (0:00:00.521)       0:00:40.754 **********\nchanged: [instance] => (item=osd0)\nchanged: [instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Create a volume group for each loop device] ******************************\nSunday 01 March 2026  21:05:02 +0000 (0:00:00.735)       0:00:41.489 **********\nchanged: [instance] => (item=osd0)\nchanged: 
[instance] => (item=osd1)\nchanged: [instance] => (item=osd2)\n\nTASK [Create a logical volume for each loop device] ****************************\nSunday 01 March 2026  21:05:06 +0000 (0:00:03.188)       0:00:44.678 **********\nchanged: [instance] => (item=ceph-instance-osd0)\nchanged: [instance] => (item=ceph-instance-osd1)\nchanged: [instance] => (item=ceph-instance-osd2)\n\nPLAY [controllers] *************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:05:08 +0000 (0:00:01.924)       0:00:46.602 **********\nok: [instance]\n\nTASK [Set masquerade rule] *****************************************************\nSunday 01 March 2026  21:05:08 +0000 (0:00:00.922)       0:00:47.524 **********\nchanged: [instance]\n\nPLAY RECAP *********************************************************************\ninstance                   : ok=24   changed=10   unreachable=0    failed=0    skipped=1    rescued=0    ignored=0\nlocalhost                  : ok=40   changed=21   unreachable=0    failed=0    skipped=1    rescued=0    ignored=0\n\nSunday 01 March 2026  21:05:09 +0000 (0:00:00.459)       0:00:47.984 **********\n===============================================================================\nInstall depedencies ---------------------------------------------------- 19.62s\nGenerate SSH keys for missing variables --------------------------------- 3.22s\nCreate a volume group for each loop device ------------------------------ 3.19s\nCreate a logical volume for each loop device ---------------------------- 1.92s\nGathering Facts --------------------------------------------------------- 1.16s\nInstall \"dirmngr\" for GPG keyserver operations -------------------------- 1.12s\nCreate folders for workspace -------------------------------------------- 1.01s\nGathering Facts --------------------------------------------------------- 0.92s\nGenerate endpoint skeleton for missing 
variables ------------------------ 0.79s\nGathering Facts --------------------------------------------------------- 0.75s\nStart loop devices ------------------------------------------------------ 0.74s\nGathering Facts --------------------------------------------------------- 0.69s\nPurge \"snapd\" package --------------------------------------------------- 0.69s\nGathering Facts --------------------------------------------------------- 0.68s\nConfigure short hostname ------------------------------------------------ 0.68s\nSet permissions on loopback devices ------------------------------------- 0.52s\nWrite new Ceph control plane configuration file to disk ----------------- 0.52s\nStart up service -------------------------------------------------------- 0.52s\nCreate devices for Ceph ------------------------------------------------- 0.51s\nSet masquerade rule ----------------------------------------------------- 0.46s\nINFO     [aio > prepare] Executed: Successful\nINFO     [aio > converge] Executing\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:05:12 +0000 (0:00:00.017)       0:00:00.017 **********\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. 
See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nSunday 01 March 2026  21:05:13 +0000 (0:00:01.284)       0:00:01.302 **********\nok: [instance]\n\nPLAY [Deploy Ceph monitors & managers] *****************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:05:13 +0000 (0:00:00.304)       0:00:01.606 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:05:14 +0000 (0:00:00.879)       0:00:02.486 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:05:15 +0000 (0:00:00.282)       0:00:02.768 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  21:05:15 +0000 (0:00:00.043)       0:00:02.811 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:05:15 +0000 (0:00:00.284)       0:00:03.096 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:05:15 +0000 (0:00:00.069)       0:00:03.166 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:05:16 +0000 (0:00:00.635)       0:00:03.801 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:05:16 +0000 (0:00:00.043)       0:00:03.845 **********\nskipping: [instance]\n\nTASK 
[vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:05:16 +0000 (0:00:00.043)       0:00:03.888 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:05:16 +0000 (0:00:00.199)       0:00:04.087 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:05:17 +0000 (0:00:01.265)       0:00:05.353 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:05:17 +0000 (0:00:00.067)       0:00:05.421 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:05:18 +0000 (0:00:00.689)       0:00:06.110 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  21:05:21 +0000 (0:00:02.910)       0:00:09.021 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  21:05:21 +0000 (0:00:00.031)       0:00:09.052 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  21:05:21 +0000 (0:00:00.032)       0:00:09.084 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nSunday 01 March 2026  21:05:21 +0000 (0:00:00.025)       0:00:09.109 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nSunday 01 March 2026  21:05:26 +0000 (0:00:05.317)       0:00:14.427 **********\nchanged: [instance]\n\nTASK 
[vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  21:05:27 +0000 (0:00:00.541)       0:00:14.969 **********\nchanged: [instance] => (item={'path': '/etc/containerd'})\nchanged: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nchanged: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nchanged: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nchanged: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  21:05:28 +0000 (0:00:00.894)       0:00:15.863 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  21:05:28 +0000 (0:00:00.526)       0:00:16.389 **********\n\nRUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************\nSunday 01 March 2026  21:05:28 +0000 (0:00:00.008)       0:00:16.398 **********\nok: [instance]\n\nRUNNING HANDLER [vexxhost.containers.containerd : Restart containerd] **********\nSunday 01 March 2026  21:05:29 +0000 (0:00:00.882)       0:00:17.281 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  21:05:30 +0000 (0:00:00.474)       0:00:17.755 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:05:30 +0000 (0:00:00.541)       0:00:18.297 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:05:30 +0000 (0:00:00.212)       0:00:18.509 **********\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK 
[vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:05:30 +0000 (0:00:00.066)       0:00:18.576 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:05:31 +0000 (0:00:00.852)       0:00:19.429 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nSunday 01 March 2026  21:05:35 +0000 (0:00:04.278)       0:00:23.707 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nSunday 01 March 2026  21:05:36 +0000 (0:00:00.936)       0:00:24.644 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nSunday 01 March 2026  21:05:37 +0000 (0:00:00.313)       0:00:24.957 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nSunday 01 March 2026  21:05:37 +0000 (0:00:00.443)       0:00:25.401 **********\nchanged: [instance] => (item={'path': '/etc/docker'})\nchanged: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nchanged: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nSunday 01 March 2026  21:05:38 +0000 (0:00:00.568)       0:00:25.969 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Create docker daemon config file] ***********\nSunday 01 March 2026  21:05:38 +0000 (0:00:00.446)       0:00:26.416 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nSunday 01 March 2026  21:05:39 +0000 (0:00:00.428)       0:00:26.845 **********\n\nRUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************\nSunday 01 March 2026  21:05:39 +0000 (0:00:00.009)       0:00:26.855 **********\nok: 
[instance]\n\nRUNNING HANDLER [vexxhost.containers.docker : Restart docker] ******************\nSunday 01 March 2026  21:05:39 +0000 (0:00:00.722)       0:00:27.577 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nSunday 01 March 2026  21:05:40 +0000 (0:00:00.821)       0:00:28.399 **********\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******\nSunday 01 March 2026  21:05:41 +0000 (0:00:00.563)       0:00:28.962 **********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK [vexxhost.ceph.cephadm : Install packages] ********************************\nSunday 01 March 2026  21:05:41 +0000 (0:00:00.060)       0:00:29.022 **********\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nSunday 01 March 2026  21:05:45 +0000 (0:00:04.692)       0:00:33.715 **********\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nSunday 01 March 2026  21:05:46 +0000 (0:00:00.640)       0:00:34.355 **********\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nSunday 01 March 2026  21:05:46 +0000 (0:00:00.301)       0:00:34.656 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************\nSunday 01 March 2026  21:05:47 +0000 (0:00:00.204)       0:00:34.861 **********\nchanged: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nSunday 01 March 2026  21:05:47 +0000 (0:00:00.471)       0:00:35.332 **********\nchanged: [instance]\n\nTASK [vexxhost.ceph.mon : Get `cephadm ls` status] *****************************\nSunday 01 March 2026  21:05:47 +0000 (0:00:00.370)       0:00:35.703 
**********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************\nSunday 01 March 2026  21:05:49 +0000 (0:00:01.673)       0:00:37.377 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********\nSunday 01 March 2026  21:05:49 +0000 (0:00:00.061)       0:00:37.439 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************\nSunday 01 March 2026  21:05:49 +0000 (0:00:00.048)       0:00:37.487 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************\nSunday 01 March 2026  21:05:49 +0000 (0:00:00.042)       0:00:37.529 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************\nSunday 01 March 2026  21:05:49 +0000 (0:00:00.041)       0:00:37.571 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************\nSunday 01 March 2026  21:05:49 +0000 (0:00:00.044)       0:00:37.615 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************\nSunday 01 March 2026  21:05:49 +0000 (0:00:00.040)       0:00:37.655 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************\nSunday 01 March 2026  21:05:49 +0000 (0:00:00.040)       0:00:37.696 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************\nSunday 01 March 2026  21:05:50 +0000 (0:00:00.043)       0:00:37.739 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************\nSunday 01 March 2026  21:05:50 +0000 (0:00:00.119)       0:00:37.858 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********\nSunday 01 
March 2026  21:05:50 +0000 (0:00:00.209)       0:00:38.068 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************\nSunday 01 March 2026  21:05:50 +0000 (0:00:00.052)       0:00:38.121 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/mon/tasks/bootstrap-ceph.yml for instance\n\nTASK [vexxhost.ceph.mon : Generate temporary file for \"ceph.conf\"] *************\nSunday 01 March 2026  21:05:50 +0000 (0:00:00.073)       0:00:38.194 **********\nchanged: [instance]\n\nTASK [vexxhost.ceph.mon : Include extra configuration values] ******************\nSunday 01 March 2026  21:05:50 +0000 (0:00:00.343)       0:00:38.538 **********\nchanged: [instance] => (item={'option': 'mon allow pool size one', 'section': 'global', 'value': True})\nchanged: [instance] => (item={'option': 'osd crush chooseleaf type', 'section': 'global', 'value': 0})\nchanged: [instance] => (item={'option': 'auth allow insecure global id reclaim', 'section': 'mon', 'value': False})\n\nTASK [vexxhost.ceph.mon : Run Bootstrap coomand] *******************************\nSunday 01 March 2026  21:05:51 +0000 (0:00:00.697)       0:00:39.235 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Remove temporary file for \"ceph.conf\"] ***************\nSunday 01 March 2026  21:07:55 +0000 (0:02:03.512)       0:02:42.748 **********\nchanged: [instance]\n\nTASK [vexxhost.ceph.mon : Set bootstrap node] **********************************\nSunday 01 March 2026  21:07:55 +0000 (0:00:00.203)       0:02:42.951 **********\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nSunday 01 March 2026  21:07:55 +0000 (0:00:00.043)       0:02:42.995 **********\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nSunday 01 March 2026  21:07:55 +0000 (0:00:00.081)       0:02:43.076 **********\nok: 
[instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nSunday 01 March 2026  21:07:56 +0000 (0:00:01.584)       0:02:44.661 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nSunday 01 March 2026  21:07:56 +0000 (0:00:00.063)       0:02:44.724 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nSunday 01 March 2026  21:07:57 +0000 (0:00:00.411)       0:02:45.135 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************\nSunday 01 March 2026  21:07:59 +0000 (0:00:01.901)       0:02:47.037 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Validate monitor exist] ******************************\nSunday 01 March 2026  21:08:00 +0000 (0:00:01.618)       0:02:48.656 **********\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nSunday 01 March 2026  21:08:11 +0000 (0:00:10.382)       0:02:59.038 **********\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nSunday 01 March 2026  21:08:11 +0000 (0:00:00.076)       0:02:59.115 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nSunday 01 March 2026  21:08:11 +0000 (0:00:00.053)       0:02:59.169 **********\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nSunday 01 March 2026  21:08:11 +0000 (0:00:00.049)       0:02:59.218 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nSunday 01 March 2026  21:08:11 +0000 (0:00:00.259)       0:02:59.477 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Configure 
\"mgr\" label for managers] ******************\nSunday 01 March 2026  21:08:13 +0000 (0:00:01.978)       0:03:01.456 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Validate manager exist] ******************************\nSunday 01 March 2026  21:08:15 +0000 (0:00:01.652)       0:03:03.108 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********\nSunday 01 March 2026  21:08:16 +0000 (0:00:01.592)       0:03:04.700 **********\nok: [instance]\n\nPLAY [Deploy Ceph OSDs] ********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:08:19 +0000 (0:00:02.534)       0:03:07.235 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:08:20 +0000 (0:00:00.938)       0:03:08.174 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:08:20 +0000 (0:00:00.211)       0:03:08.385 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  21:08:20 +0000 (0:00:00.047)       0:03:08.433 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:08:20 +0000 (0:00:00.222)       0:03:08.656 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:08:20 +0000 (0:00:00.061)       0:03:08.718 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:08:21 +0000 (0:00:00.308)       0:03:09.026 **********\nskipping: [instance]\n\nTASK 
[vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:08:21 +0000 (0:00:00.056)       0:03:09.083 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:08:21 +0000 (0:00:00.051)       0:03:09.134 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:08:21 +0000 (0:00:00.210)       0:03:09.345 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:08:22 +0000 (0:00:01.278)       0:03:10.623 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:08:22 +0000 (0:00:00.069)       0:03:10.693 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:08:23 +0000 (0:00:00.333)       0:03:11.026 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  21:08:25 +0000 (0:00:01.890)       0:03:12.916 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  21:08:25 +0000 (0:00:00.034)       0:03:12.951 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  21:08:25 +0000 (0:00:00.036)       0:03:12.988 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nSunday 01 March 2026  21:08:25 +0000 (0:00:00.034)       0:03:13.022 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : 
Create systemd service file for containerd] ***\nSunday 01 March 2026  21:08:26 +0000 (0:00:01.168)       0:03:14.190 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  21:08:26 +0000 (0:00:00.427)       0:03:14.617 **********\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  21:08:27 +0000 (0:00:00.882)       0:03:15.500 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  21:08:28 +0000 (0:00:00.481)       0:03:15.981 **********\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  21:08:28 +0000 (0:00:00.006)       0:03:15.988 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:08:28 +0000 (0:00:00.342)       0:03:16.330 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:08:28 +0000 (0:00:00.195)       0:03:16.526 **********\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:08:28 +0000 (0:00:00.052)       0:03:16.578 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  
21:08:29 +0000 (0:00:00.336)       0:03:16.915 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nSunday 01 March 2026  21:08:32 +0000 (0:00:03.030)       0:03:19.945 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nSunday 01 March 2026  21:08:33 +0000 (0:00:01.162)       0:03:21.107 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nSunday 01 March 2026  21:08:33 +0000 (0:00:00.194)       0:03:21.302 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nSunday 01 March 2026  21:08:33 +0000 (0:00:00.392)       0:03:21.694 **********\nok: [instance] => (item={'path': '/etc/docker'})\nok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nSunday 01 March 2026  21:08:34 +0000 (0:00:00.521)       0:03:22.215 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create docker daemon config file] ***********\nSunday 01 March 2026  21:08:34 +0000 (0:00:00.390)       0:03:22.606 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nSunday 01 March 2026  21:08:35 +0000 (0:00:00.415)       0:03:23.022 **********\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nSunday 01 March 2026  21:08:35 +0000 (0:00:00.006)       0:03:23.028 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******\nSunday 01 March 2026  21:08:35 +0000 (0:00:00.344)       0:03:23.373 **********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK 
[vexxhost.ceph.cephadm : Install packages] ********************************\nSunday 01 March 2026  21:08:35 +0000 (0:00:00.059)       0:03:23.432 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nSunday 01 March 2026  21:08:36 +0000 (0:00:01.216)       0:03:24.649 **********\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nSunday 01 March 2026  21:08:37 +0000 (0:00:00.662)       0:03:25.312 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nSunday 01 March 2026  21:08:37 +0000 (0:00:00.299)       0:03:25.611 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************\nSunday 01 March 2026  21:08:38 +0000 (0:00:00.202)       0:03:25.813 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nSunday 01 March 2026  21:08:38 +0000 (0:00:00.235)       0:03:26.049 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get monitor status] **********************************\nSunday 01 March 2026  21:08:38 +0000 (0:00:00.208)       0:03:26.258 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.osd : Select admin host] ***********************************\nSunday 01 March 2026  21:08:38 +0000 (0:00:00.217)       0:03:26.476 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************\nSunday 01 March 2026  21:08:38 +0000 (0:00:00.048)       0:03:26.524 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************\nSunday 01 March 2026  21:08:43 +0000 (0:00:04.273)       0:03:30.797 **********\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nSunday 01 March 2026  21:08:43 +0000 
(0:00:00.049)       0:03:30.847 **********\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nSunday 01 March 2026  21:08:43 +0000 (0:00:00.070)       0:03:30.917 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nSunday 01 March 2026  21:08:43 +0000 (0:00:00.049)       0:03:30.966 **********\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nSunday 01 March 2026  21:08:43 +0000 (0:00:00.047)       0:03:31.014 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nSunday 01 March 2026  21:08:43 +0000 (0:00:00.256)       0:03:31.270 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************\nSunday 01 March 2026  21:08:45 +0000 (0:00:01.998)       0:03:33.268 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************\nSunday 01 March 2026  21:08:45 +0000 (0:00:00.028)       0:03:33.296 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************\nSunday 01 March 2026  21:08:45 +0000 (0:00:00.026)       0:03:33.322 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] *******************\nSunday 01 March 2026  21:08:50 +0000 (0:00:05.279)       0:03:38.602 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Install OSDs] ****************************************\nSunday 01 March 2026  21:09:01 +0000 (0:00:10.346)       0:03:48.949 **********\nok: [instance] => (item=/dev/ceph-instance-osd0/data)\nok: [instance] => (item=/dev/ceph-instance-osd1/data)\nok: [instance] => (item=/dev/ceph-instance-osd2/data)\n\nTASK [vexxhost.ceph.osd : Get mon dump] 
****************************************\nSunday 01 March 2026  21:10:29 +0000 (0:01:27.985)       0:05:16.935 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Mark require osd release] ****************************\nSunday 01 March 2026  21:10:30 +0000 (0:00:01.649)       0:05:18.584 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************\nSunday 01 March 2026  21:10:32 +0000 (0:00:01.666)       0:05:20.251 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance\n\nTASK [vexxhost.ceph.osd : Set the retry count] *********************************\nSunday 01 March 2026  21:10:32 +0000 (0:00:00.061)       0:05:20.312 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************\nSunday 01 March 2026  21:10:32 +0000 (0:00:00.044)       0:05:20.357 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : OSD daemon list] *************************************\nSunday 01 March 2026  21:10:34 +0000 (0:00:01.612)       0:05:21.969 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************\nSunday 01 March 2026  21:10:34 +0000 (0:00:00.048)       0:05:22.017 **********\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] ************************\nSunday 01 March 2026  21:10:34 +0000 (0:00:00.055)       0:05:22.073 **********\nskipping: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Ensure RBD kernel module is loaded] **************************************\nSunday 01 March 2026  21:10:34 +0000 (0:00:00.043)       0:05:22.116 **********\nchanged: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering 
Facts] *********************************************************\nSunday 01 March 2026  21:10:34 +0000 (0:00:00.367)       0:05:22.484 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************\nSunday 01 March 2026  21:10:35 +0000 (0:00:00.940)       0:05:23.425 **********\nchanged: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})\nchanged: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})\nchanged: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})\nchanged: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.rmem_default', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})\nchanged: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})\nchanged: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})\nchanged: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})\nchanged: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})\nchanged: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})\nchanged: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})\nchanged: [instance] => (item={'name': 
'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})\n\nTASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***\nSunday 01 March 2026  21:10:39 +0000 (0:00:03.602)       0:05:27.027 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******\nSunday 01 March 2026  21:10:39 +0000 (0:00:00.186)       0:05:27.214 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************\nSunday 01 March 2026  21:10:39 +0000 (0:00:00.436)       0:05:27.650 **********\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nSunday 01 March 2026  21:10:40 +0000 (0:00:00.231)       0:05:27.881 **********\nok: [instance]\n\nPLAY [Configure Kubernetes VIP] ************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:10:40 +0000 (0:00:00.050)       0:05:27.932 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***\nSunday 01 March 2026  21:10:41 +0000 (0:00:00.918)       0:05:28.850 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************\nSunday 01 March 2026  21:10:41 +0000 (0:00:00.205)       0:05:29.056 **********\nok: [instance] => (item=/etc/keepalived/keepalived.conf)\nok: [instance] => (item=/etc/keepalived/check_apiserver.sh)\nok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)\nok: [instance] => (item=/etc/haproxy/haproxy.cfg)\nok: [instance] => (item=/etc/kubernetes/manifests/haproxy.yaml)\n\nTASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****\nSunday 01 March 2026  21:10:42 +0000 (0:00:00.843)       0:05:29.900 **********\nfailed: [instance] (item=/etc/kubernetes/manifests/kube-apiserver.yaml) => {\"ansible_loop_var\": \"item\", 
\"changed\": false, \"item\": \"/etc/kubernetes/manifests/kube-apiserver.yaml\", \"msg\": \"Path /etc/kubernetes/manifests/kube-apiserver.yaml does not exist !\", \"rc\": 257}\nfailed: [instance] (item=/etc/kubernetes/controller-manager.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/controller-manager.conf\", \"msg\": \"Path /etc/kubernetes/controller-manager.conf does not exist !\", \"rc\": 257}\nfailed: [instance] (item=/etc/kubernetes/scheduler.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/scheduler.conf\", \"msg\": \"Path /etc/kubernetes/scheduler.conf does not exist !\", \"rc\": 257}\n...ignoring\n\nTASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********\nSunday 01 March 2026  21:10:42 +0000 (0:00:00.505)       0:05:30.406 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********\nSunday 01 March 2026  21:10:42 +0000 (0:00:00.196)       0:05:30.603 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************\nSunday 01 March 2026  21:10:43 +0000 (0:00:00.185)       0:05:30.788 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***\nSunday 01 March 2026  21:10:43 +0000 (0:00:00.036)       0:05:30.825 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes manifest] ***************\nSunday 01 March 2026  21:10:43 +0000 (0:00:00.040)       0:05:30.865 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******\nSunday 01 March 2026  21:10:43 +0000 (0:00:00.521)       0:05:31.387 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************\nSunday 01 March 2026  21:10:43 +0000 (0:00:00.191)       0:05:31.578 **********\n\nPLAY [Install Kubernetes] 
******************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:10:43 +0000 (0:00:00.056)       0:05:31.634 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:10:44 +0000 (0:00:00.885)       0:05:32.520 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:10:44 +0000 (0:00:00.209)       0:05:32.730 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  21:10:45 +0000 (0:00:00.046)       0:05:32.777 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:10:45 +0000 (0:00:00.208)       0:05:32.985 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:10:45 +0000 (0:00:00.060)       0:05:33.045 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:10:45 +0000 (0:00:00.301)       0:05:33.347 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:10:45 +0000 (0:00:00.055)       0:05:33.403 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:10:45 +0000 (0:00:00.198)       0:05:33.602 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:10:46 +0000 (0:00:01.004)       0:05:34.606 **********\nok: [instance] => {\n    \"msg\": 
\"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:10:46 +0000 (0:00:00.069)       0:05:34.675 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:10:47 +0000 (0:00:00.332)       0:05:35.007 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  21:10:49 +0000 (0:00:01.915)       0:05:36.923 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  21:10:49 +0000 (0:00:00.034)       0:05:36.958 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  21:10:49 +0000 (0:00:00.038)       0:05:36.996 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nSunday 01 March 2026  21:10:49 +0000 (0:00:00.180)       0:05:37.177 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nSunday 01 March 2026  21:10:50 +0000 (0:00:00.958)       0:05:38.136 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  21:10:50 +0000 (0:00:00.438)       0:05:38.574 **********\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK 
[vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  21:10:51 +0000 (0:00:00.905)       0:05:39.479 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  21:10:52 +0000 (0:00:00.469)       0:05:39.949 **********\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  21:10:52 +0000 (0:00:00.007)       0:05:39.957 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***\nSunday 01 March 2026  21:10:52 +0000 (0:00:00.345)       0:05:40.303 **********\nfatal: [instance]: FAILED! => {\"changed\": false, \"msg\": \"Failed to import the required Python library (kubernetes) on instance's Python /usr/bin/python3.10. Please read the module documentation and install it in the appropriate location. If the required library is installed, but Ansible is using the wrong Python interpreter, please consult the documentation on ansible_python_interpreter\"}\n...ignoring\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.585)       0:05:40.888 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.033)       0:05:40.922 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.043)       0:05:40.965 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.037)       0:05:41.003 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact 
if we need to upgrade] ***\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.038)       0:05:41.041 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.042)       0:05:41.083 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.210)       0:05:41.294 **********\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:10:53 +0000 (0:00:00.049)       0:05:41.343 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:10:54 +0000 (0:00:00.678)       0:05:42.021 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:10:54 +0000 (0:00:00.048)       0:05:42.069 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:10:54 +0000 (0:00:00.201)       0:05:42.271 **********\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:10:54 +0000 (0:00:00.050)       0:05:42.321 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:10:55 +0000 (0:00:01.093)       0:05:43.415 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  21:10:55 +0000 (0:00:00.063)       0:05:43.478 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : 
Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  21:10:55 +0000 (0:00:00.043)       0:05:43.522 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  21:10:55 +0000 (0:00:00.037)       0:05:43.559 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nSunday 01 March 2026  21:10:55 +0000 (0:00:00.040)       0:05:43.600 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nSunday 01 March 2026  21:10:56 +0000 (0:00:01.123)       0:05:44.723 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  21:10:57 +0000 (0:00:00.443)       0:05:45.166 **********\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  21:10:58 +0000 (0:00:00.893)       0:05:46.059 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  21:10:58 +0000 (0:00:00.481)       0:05:46.541 **********\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  21:10:58 +0000 (0:00:00.007)       0:05:46.549 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:10:59 +0000 (0:00:00.357)       0:05:46.906 **********\nok: [instance]\n\nTASK 
[vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:10:59 +0000 (0:00:00.210)       0:05:47.116 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:10:59 +0000 (0:00:00.060)       0:05:47.176 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:11:00 +0000 (0:00:00.622)       0:05:47.798 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:11:01 +0000 (0:00:01.340)       0:05:49.139 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:11:01 +0000 (0:00:00.054)       0:05:49.194 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:11:02 +0000 (0:00:00.675)       0:05:49.869 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.cri_tools : Create crictl config] ********************\nSunday 01 March 2026  21:11:03 +0000 (0:00:01.394)       0:05:51.264 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********\nSunday 01 March 2026  21:11:03 +0000 (0:00:00.450)       0:05:51.714 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:11:04 +0000 (0:00:00.222)       0:05:51.937 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] 
*******\nSunday 01 March 2026  21:11:04 +0000 (0:00:00.204)       0:05:52.142 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:11:04 +0000 (0:00:00.060)       0:05:52.202 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:11:05 +0000 (0:00:00.814)       0:05:53.017 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***\nSunday 01 March 2026  21:11:07 +0000 (0:00:02.674)       0:05:55.691 **********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)\n\nTASK [vexxhost.containers.cni_plugins : Install additional packages] ***********\nSunday 01 March 2026  21:11:08 +0000 (0:00:00.051)       0:05:55.742 **********\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************\nSunday 01 March 2026  21:11:08 +0000 (0:00:00.973)       0:05:56.715 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********\nSunday 01 March 2026  21:11:09 +0000 (0:00:00.194)       0:05:56.910 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******\nSunday 01 March 2026  21:11:09 +0000 (0:00:00.441)       0:05:57.351 **********\nchanged: [instance] => (item=br_netfilter)\nok: [instance] => (item=ip_tables)\nchanged: [instance] => (item=ip6_tables)\nok: [instance] => (item=nf_conntrack)\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:11:10 +0000 (0:00:00.726)       0:05:58.077 **********\nok: [instance]\n\nTASK 
[vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:11:10 +0000 (0:00:00.199)       0:05:58.277 **********\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:11:10 +0000 (0:00:00.052)       0:05:58.329 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:11:12 +0000 (0:00:01.632)       0:05:59.962 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***\nSunday 01 March 2026  21:11:12 +0000 (0:00:00.038)       0:06:00.001 **********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)\n\nTASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************\nSunday 01 March 2026  21:11:12 +0000 (0:00:00.063)       0:06:00.064 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************\nSunday 01 March 2026  21:11:12 +0000 (0:00:00.040)       0:06:00.104 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************\nSunday 01 March 2026  21:11:16 +0000 (0:00:03.940)       0:06:04.045 **********\nchanged: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})\nchanged: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})\nchanged: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})\nchanged: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})\nchanged: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})\nchanged: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})\nchanged: 
[instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})\n\nTASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***\nSunday 01 March 2026  21:11:17 +0000 (0:00:01.264)       0:06:05.309 **********\nchanged: [instance] => (item=/etc/systemd/system/kubelet.service.d)\nok: [instance] => (item=/etc/kubernetes)\nok: [instance] => (item=/etc/kubernetes/manifests)\n\nTASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********\nSunday 01 March 2026  21:11:18 +0000 (0:00:00.533)       0:06:05.842 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***\nSunday 01 March 2026  21:11:18 +0000 (0:00:00.457)       0:06:06.300 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Check swap status] *************************\nSunday 01 March 2026  21:11:19 +0000 (0:00:00.460)       0:06:06.761 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************\nSunday 01 March 2026  21:11:19 +0000 (0:00:00.214)       0:06:06.975 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********\nSunday 01 March 2026  21:11:19 +0000 (0:00:00.034)       0:06:07.009 **********\nok: [instance] => (item=swap)\nok: [instance] => (item=none)\n\nTASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***\nSunday 01 March 2026  21:11:19 +0000 (0:00:00.557)       0:06:07.566 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************\nSunday 01 March 2026  21:11:20 +0000 (0:00:00.471)       0:06:08.038 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********\nSunday 01 March 2026  21:11:20 +0000 (0:00:00.567)       0:06:08.606 **********\n\nRUNNING HANDLER [vexxhost.kubernetes.kubelet : Reload systemd] 
*****************\nSunday 01 March 2026  21:11:20 +0000 (0:00:00.008)       0:06:08.614 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********\nSunday 01 March 2026  21:11:21 +0000 (0:00:00.752)       0:06:09.366 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****\nSunday 01 March 2026  21:11:22 +0000 (0:00:00.583)       0:06:09.949 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************\nSunday 01 March 2026  21:11:23 +0000 (0:00:00.961)       0:06:10.911 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********\nSunday 01 March 2026  21:11:23 +0000 (0:00:00.729)       0:06:11.641 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********\nSunday 01 March 2026  21:11:24 +0000 (0:00:00.217)       0:06:11.858 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Setup control plane] ********************\nSunday 01 March 2026  21:11:24 +0000 (0:00:00.528)       0:06:12.386 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************\nSunday 01 March 2026  21:11:24 +0000 (0:00:00.100)       0:06:12.487 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***\nSunday 01 March 2026  21:11:24 +0000 (0:00:00.090)       0:06:12.578 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.211)       0:06:12.789 
**********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.048)       0:06:12.838 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.056)       0:06:12.894 **********\nok: [instance] => {\n    \"msg\": \"instance\"\n}\n\nTASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.042)       0:06:12.937 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.543)       0:06:13.480 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.043)       0:06:13.523 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca cert] **************\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.037)       0:06:13.561 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.036)       0:06:13.597 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.039)       0:06:13.637 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.039)       0:06:13.677 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************\nSunday 01 March 2026  21:11:25 +0000 (0:00:00.035)       0:06:13.713 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Initialize 
cluster] *********************\nSunday 01 March 2026  21:11:26 +0000 (0:00:00.035)       0:06:13.748 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***\nSunday 01 March 2026  21:11:46 +0000 (0:00:20.691)       0:06:34.440 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************\nSunday 01 March 2026  21:11:46 +0000 (0:00:00.233)       0:06:34.673 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***\nSunday 01 March 2026  21:11:46 +0000 (0:00:00.040)       0:06:34.714 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***\nSunday 01 March 2026  21:11:47 +0000 (0:00:00.215)       0:06:34.930 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******\nSunday 01 March 2026  21:11:47 +0000 (0:00:00.214)       0:06:35.145 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************\nSunday 01 March 2026  21:11:47 +0000 (0:00:00.198)       0:06:35.344 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***\nSunday 01 March 2026  21:11:51 +0000 (0:00:03.886)       0:06:39.231 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************\nSunday 01 March 2026  21:11:54 +0000 (0:00:03.140)       0:06:42.371 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***\nSunday 01 March 2026  21:11:54 +0000 (0:00:00.048)       0:06:42.420 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***\nSunday 01 March 
2026  21:11:54 +0000 (0:00:00.049)       0:06:42.470 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************\nSunday 01 March 2026  21:11:55 +0000 (0:00:00.727)       0:06:43.197 **********\nskipping: [instance] => (item=DaemonSet)\nskipping: [instance] => (item=ConfigMap)\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************\nSunday 01 March 2026  21:11:55 +0000 (0:00:00.040)       0:06:43.238 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***\nSunday 01 March 2026  21:11:55 +0000 (0:00:00.044)       0:06:43.282 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of the Kubernetes API services] ***\nSunday 01 March 2026  21:11:55 +0000 (0:00:00.044)       0:06:43.327 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************\nSunday 01 March 2026  21:11:55 +0000 (0:00:00.054)       0:06:43.382 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***\nSunday 01 March 2026  21:11:55 +0000 (0:00:00.044)       0:06:43.426 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***\nSunday 01 March 2026  21:11:56 +0000 (0:00:00.486)       0:06:43.912 **********\nchanged: [instance]\n\nPLAY [Install control-plane components] ****************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:11:57 +0000 (0:00:00.953)       0:06:44.866 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  21:11:58 +0000 (0:00:00.988)       0:06:45.854 **********\nok: [instance]\n\nTASK 
[vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:11:58 +0000 (0:00:00.201)       0:06:46.056 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  21:11:59 +0000 (0:00:01.152)       0:06:47.209 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:11:59 +0000 (0:00:00.216)       0:06:47.425 **********\nok: [instance] => {\n    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:11:59 +0000 (0:00:00.046)       0:06:47.472 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:12:00 +0000 (0:00:00.607)       0:06:48.080 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************\nSunday 01 March 2026  21:12:01 +0000 (0:00:01.504)       0:06:49.584 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************\nSunday 01 March 2026  21:12:02 +0000 (0:00:00.325)       0:06:49.909 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************\nSunday 01 March 2026  21:12:02 +0000 (0:00:00.208)       0:06:50.118 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***\nSunday 01 March 2026  21:12:02 +0000 (0:00:00.284)       0:06:50.403 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***\nSunday 01 March 2026  21:12:02 +0000 (0:00:00.195)       0:06:50.599 **********\nok: [instance]\n\nTASK [Install plugin] 
**********************************************************\nSunday 01 March 2026  21:12:03 +0000 (0:00:00.211)       0:06:50.810 **********\nincluded: vexxhost.containers.download_artifact for instance\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:12:03 +0000 (0:00:00.053)       0:06:50.863 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:12:03 +0000 (0:00:00.048)       0:06:50.911 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:12:03 +0000 (0:00:00.645)       0:06:51.557 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:12:05 +0000 (0:00:01.777)       0:06:53.334 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:12:05 +0000 (0:00:00.041)       0:06:53.376 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************\nSunday 01 March 2026  21:12:06 +0000 (0:00:00.647)       0:06:54.024 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************\nSunday 01 March 2026  21:12:07 +0000 (0:00:00.857)       0:06:54.882 **********\nchanged: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:12:09 +0000 (0:00:02.093)       0:06:56.975 **********\nok: 
[instance]\n\nTASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********\nSunday 01 March 2026  21:12:10 +0000 (0:00:01.046)       0:06:58.022 **********\nchanged: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Uninstall unattended-upgrades] *******************************************\nSunday 01 March 2026  21:12:10 +0000 (0:00:00.688)       0:06:58.710 **********\nok: [instance]\n\nPLAY [controllers] *************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:12:11 +0000 (0:00:00.696)       0:06:59.407 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:12:12 +0000 (0:00:01.075)       0:07:00.482 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:12:12 +0000 (0:00:00.038)       0:07:00.520 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********\nSunday 01 March 2026  21:12:13 +0000 (0:00:00.463)       0:07:00.983 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:12:14 +0000 (0:00:01.284)       0:07:02.268 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.038)       0:07:02.307 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.041)       0:07:02.348 **********\nskipping: 
[instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.029)       0:07:02.377 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.045)       0:07:02.423 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.049)       0:07:02.472 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.036)       0:07:02.509 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.035)       0:07:02.544 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.037)       0:07:02.582 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create Secret] **********************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.035)       0:07:02.618 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.030)       0:07:02.649 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.030)       0:07:02.679 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************\nSunday 01 March 2026  21:12:14 +0000 (0:00:00.042)       0:07:02.721 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********\nSunday 01 March 2026  21:12:15 
+0000 (0:00:00.030)       0:07:02.752 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.028)       0:07:02.780 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.033)       0:07:02.813 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.030)       0:07:02.844 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.032)       0:07:02.876 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create namespace] *************************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.035)       0:07:02.911 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Install Portworx] *************************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.034)       0:07:02.946 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.029)       0:07:02.975 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.032)       0:07:03.008 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.043)       0:07:03.052 **********\nskipping: [instance] => (item={'name': 'controllerplugin'})\nskipping: [instance] => (item={'name': 'nodeplugin'})\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************\nSunday 01 March 2026  21:12:15 +0000 
(0:00:00.036)       0:07:03.088 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.031)       0:07:03.120 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.044)       0:07:03.164 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.035)       0:07:03.200 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.031)       0:07:03.231 **********\nskipping: [instance]\n\nPLAY [Deploy Infrastructure] ***************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:12:15 +0000 (0:00:00.068)       0:07:03.300 **********\nok: [instance]\n\nTASK [Deploy Helm chart] *******************************************************\nSunday 01 March 2026  21:12:16 +0000 (0:00:00.979)       0:07:04.279 **********\nincluded: vexxhost.kubernetes.cert_manager for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:12:16 +0000 (0:00:00.050)       0:07:04.330 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:12:16 +0000 (0:00:00.040)       0:07:04.370 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.cert_manager : Deploy Helm chart] ********************\nSunday 01 March 2026  21:12:17 +0000 (0:00:00.467)       0:07:04.838 **********\nchanged: [instance]\n\nTASK 
[vexxhost.atmosphere.cluster_issuer : Create self-signed cluster issuer] ***\nSunday 01 March 2026  21:12:46 +0000 (0:00:29.016)       0:07:33.854 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Import tasks for ClusterIssuer type] ***\nSunday 01 March 2026  21:12:46 +0000 (0:00:00.706)       0:07:34.561 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/atmosphere/roles/cluster_issuer/tasks/type/self-signed/main.yml for instance\n\nTASK [vexxhost.atmosphere.cluster_issuer : Create ClusterIssuer] ***************\nSunday 01 March 2026  21:12:46 +0000 (0:00:00.040)       0:07:34.601 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Wait till the secret is created] ****\nSunday 01 March 2026  21:12:47 +0000 (0:00:00.750)       0:07:35.352 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Copy CA certificate on host] ********\nSunday 01 March 2026  21:12:48 +0000 (0:00:00.767)       0:07:36.120 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Flush all handlers] *****************\nSunday 01 March 2026  21:12:48 +0000 (0:00:00.448)       0:07:36.569 **********\n\nRUNNING HANDLER [vexxhost.atmosphere.cluster_issuer : Update CA certificates on host] ***\nSunday 01 March 2026  21:12:48 +0000 (0:00:00.005)       0:07:36.574 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:12:50 +0000 (0:00:01.586)       0:07:38.161 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:12:50 +0000 (0:00:00.049)       0:07:38.211 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ingress_nginx : Deploy Helm chart] 
*******************\nSunday 01 March 2026  21:12:50 +0000 (0:00:00.498)       0:07:38.710 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:13:04 +0000 (0:00:13.172)       0:07:51.882 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:13:04 +0000 (0:00:00.045)       0:07:51.928 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Install all CRDs] ********\nSunday 01 March 2026  21:13:04 +0000 (0:00:00.480)       0:07:52.408 **********\nok: [instance] => (item=messaging-topology-operator)\nok: [instance] => (item=rabbitmq-cluster)\n\nTASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Deploy Helm chart] *******\nSunday 01 March 2026  21:13:07 +0000 (0:00:02.845)       0:07:55.253 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:13:10 +0000 (0:00:02.651)       0:07:57.905 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:13:10 +0000 (0:00:00.048)       0:07:57.954 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Install all CRDs] ***\nSunday 01 March 2026  21:13:10 +0000 (0:00:00.471)       0:07:58.425 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Deploy Helm chart] ***\nSunday 01 March 2026  21:13:13 +0000 (0:00:03.047)       0:08:01.472 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : 
Check if the Percona XtraDB cluster secret exists] ***\nSunday 01 March 2026  21:13:15 +0000 (0:00:01.985)       0:08:03.457 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Create a secret] ************\nSunday 01 March 2026  21:13:16 +0000 (0:00:00.639)       0:08:04.097 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Check if the Percona XtraDB cluster exists] ***\nSunday 01 March 2026  21:13:17 +0000 (0:00:00.733)       0:08:04.830 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Get current status of the cluster] ***\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.929)       0:08:05.760 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Assert that the cluster is healthy before upgrade] ***\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.046)       0:08:05.806 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Stop PXC-operator] **********\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.050)       0:08:05.857 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Change the cluster Statefulset image to 8.0] ***\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.038)       0:08:05.895 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Wait until the cluster Statefulset rollout] ***\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.044)       0:08:05.940 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Update pxc cluster spec] ****\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.043)       0:08:05.983 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Start PXC-operator] *********\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.044)       0:08:06.028 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Apply Percona XtraDB 
cluster] ***\nSunday 01 March 2026  21:13:18 +0000 (0:00:00.041)       0:08:06.069 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Create percona haproxy metric service] ***\nSunday 01 March 2026  21:14:49 +0000 (0:01:31.583)       0:09:37.652 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:14:50 +0000 (0:00:00.691)       0:09:38.344 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:14:50 +0000 (0:00:00.045)       0:09:38.389 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.valkey : Create TLS resources] ***********************\nSunday 01 March 2026  21:14:51 +0000 (0:00:00.490)       0:09:38.880 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.valkey : Deploy Helm chart] **************************\nSunday 01 March 2026  21:14:51 +0000 (0:00:00.713)       0:09:39.593 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:14:53 +0000 (0:00:01.491)       0:09:41.084 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:14:53 +0000 (0:00:00.039)       0:09:41.124 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:14:53 +0000 (0:00:00.042)       0:09:41.167 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:14:53 +0000 (0:00:00.033)       0:09:41.200 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:14:53 +0000 (0:00:00.036)       0:09:41.237 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:14:54 +0000 (0:00:00.638)       0:09:41.876 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:14:54 +0000 (0:00:00.050)       0:09:41.926 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:14:54 +0000 (0:00:00.042)       0:09:41.968 **********\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:14:54 +0000 (0:00:00.054)       0:09:42.023 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:14:54 +0000 (0:00:00.052)       0:09:42.076 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:14:54 +0000 (0:00:00.044)       0:09:42.121 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Get the Kuberentes service for Percona XtraDB Cluster] ***\nSunday 01 March 2026  21:14:54 +0000 (0:00:00.487)       0:09:42.608 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Install MySQL python package] *************\nSunday 01 March 2026  21:14:55 +0000 (0:00:00.624)       0:09:43.233 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Check MySQL ready] ************************\nSunday 01 March 2026  21:14:56 +0000 (0:00:01.041)       
0:09:44.275 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Create Keycloak database] *****************\nSunday 01 March 2026  21:14:56 +0000 (0:00:00.421)       0:09:44.697 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Create a Keycloak user] *******************\nSunday 01 March 2026  21:14:57 +0000 (0:00:00.405)       0:09:45.102 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Disable pxc strict mode] ******************\nSunday 01 March 2026  21:14:57 +0000 (0:00:00.454)       0:09:45.557 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Deploy Helm chart] ************************\nSunday 01 March 2026  21:14:58 +0000 (0:00:00.398)       0:09:45.955 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Wait until keycloak ready] ****************\nSunday 01 March 2026  21:17:31 +0000 (0:02:33.358)       0:12:19.313 **********\nok: [instance]\n\nTASK [Create Keycloak Ingress] *************************************************\nSunday 01 March 2026  21:17:32 +0000 (0:00:00.658)       0:12:19.972 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress keycloak] *******************\nSunday 01 March 2026  21:17:32 +0000 (0:00:00.042)       0:12:20.015 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Enable pxc strict mode] *******************\nSunday 01 March 2026  21:17:33 +0000 (0:00:00.907)       0:12:20.922 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keepalived : Deploy service] *************************\nSunday 01 March 2026  21:17:33 +0000 (0:00:00.260)       0:12:21.183 **********\nchanged: [instance]\n\nPLAY [Deploy Monitoring] *******************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:17:34 +0000 (0:00:00.813)       0:12:21.996 **********\nok: [instance]\n\nTASK 
[vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:17:35 +0000 (0:00:01.561)       0:12:23.558 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:17:35 +0000 (0:00:00.059)       0:12:23.617 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.node_feature_discovery : Install all CRDs] ***********\nSunday 01 March 2026  21:17:36 +0000 (0:00:00.510)       0:12:24.128 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.node_feature_discovery : Deploy Helm chart] **********\nSunday 01 March 2026  21:17:37 +0000 (0:00:00.764)       0:12:24.893 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:17:39 +0000 (0:00:01.997)       0:12:26.891 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:17:39 +0000 (0:00:00.174)       0:12:27.065 **********\nchanged: [instance]\n\nTASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****\nSunday 01 March 2026  21:17:39 +0000 (0:00:00.589)       0:12:27.654 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Wait until Keycloak service is ready] ***\nSunday 01 March 2026  21:17:41 +0000 (0:00:01.166)       0:12:28.821 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak realm] *******\nSunday 01 March 2026  21:17:41 +0000 (0:00:00.718)       0:12:29.540 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Add client roles in 
\"id_token\"] ***\nSunday 01 March 2026  21:17:49 +0000 (0:00:08.070)       0:12:37.610 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" CA certificate] ***\nSunday 01 March 2026  21:17:51 +0000 (0:00:01.700)       0:12:39.311 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client certificate] ***\nSunday 01 March 2026  21:17:51 +0000 (0:00:00.297)       0:12:39.609 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client key] ***\nSunday 01 March 2026  21:17:52 +0000 (0:00:00.196)       0:12:39.805 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create secrets for monitoring] ***\nSunday 01 March 2026  21:17:52 +0000 (0:00:00.202)       0:12:40.008 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Generate client secret passwords] ***\nSunday 01 March 2026  21:17:53 +0000 (0:00:00.746)       0:12:40.755 **********\nchanged: [instance] => (item=alertmanager)\nchanged: [instance] => (item=grafana)\nchanged: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Collect all client secrets] ***\nSunday 01 March 2026  21:18:10 +0000 (0:00:17.058)       0:12:57.814 **********\nok: [instance] => (item=alertmanager)\nok: [instance] => (item=grafana)\nok: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak clients] *****\nSunday 01 March 2026  21:18:12 +0000 (0:00:01.931)       0:12:59.745 **********\nchanged: [instance] => (item=None)\nchanged: [instance] => (item=None)\nchanged: [instance] => (item=None)\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak roles] *******\nSunday 01 March 2026  21:18:15 +0000 (0:00:03.975)       0:13:03.721 **********\nchanged: [instance] => (item=None)\nchanged: [instance] => (item=None)\nchanged: [instance] => 
(item=None)\nchanged: [instance] => (item=None)\nchanged: [instance] => (item=None)\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Generate cookie secrets] *****\nSunday 01 March 2026  21:18:21 +0000 (0:00:05.104)       0:13:08.826 **********\nchanged: [instance] => (item=alertmanager)\nchanged: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Generate OAuth2 proxy configuration] ***\nSunday 01 March 2026  21:18:32 +0000 (0:00:11.337)       0:13:20.163 **********\nchanged: [instance] => (item=alertmanager)\nchanged: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create certificate issuer] ***\nSunday 01 March 2026  21:18:43 +0000 (0:00:11.392)       0:13:31.556 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Install all CRDs] ************\nSunday 01 March 2026  21:18:44 +0000 (0:00:00.700)       0:13:32.257 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy additional dashboards] ***\nSunday 01 March 2026  21:18:52 +0000 (0:00:07.857)       0:13:40.115 **********\nchanged: [instance] => (item={'name': 'haproxy', 'state': 'present'})\nchanged: [instance] => (item={'name': 'goldpinger', 'state': 'present'})\nchanged: [instance] => (item={'name': 'node-exporter-full', 'state': 'present'})\nchanged: [instance] => (item={'name': 'ceph-cluster', 'state': 'present'})\nchanged: [instance] => (item={'name': 'ceph-cluster-advanced', 'state': 'present'})\nchanged: [instance] => (item={'name': 'hosts-overview', 'state': 'present'})\nchanged: [instance] => (item={'name': 'host-details', 'state': 'present'})\nchanged: [instance] => (item={'name': 'pool-overview', 'state': 'present'})\nchanged: [instance] => (item={'name': 'pool-detail', 'state': 'present'})\nchanged: [instance] => (item={'name': 'osds-overview', 'state': 'present'})\nchanged: [instance] => (item={'name': 'osd-device-details', 'state': 
'present'})\nchanged: [instance] => (item={'name': 'rbd-overview', 'state': 'present'})\nchanged: [instance] => (item={'name': 'rbd-details', 'state': 'present'})\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy Helm chart] ***********\nSunday 01 March 2026  21:19:06 +0000 (0:00:14.378)       0:13:54.493 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:19:37 +0000 (0:00:30.991)       0:14:25.485 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:19:37 +0000 (0:00:00.079)       0:14:25.564 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.loki : Deploy Helm chart] ****************************\nSunday 01 March 2026  21:19:38 +0000 (0:00:00.621)       0:14:26.186 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:19:42 +0000 (0:00:03.918)       0:14:30.104 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:19:42 +0000 (0:00:00.064)       0:14:30.169 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.vector : Deploy Helm chart] **************************\nSunday 01 March 2026  21:19:43 +0000 (0:00:00.628)       0:14:30.798 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:19:45 +0000 (0:00:02.592)       0:14:33.390 **********\nincluded: 
/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:19:45 +0000 (0:00:00.051)       0:14:33.442 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.goldpinger : Deploy Helm chart] **********************\nSunday 01 March 2026  21:19:46 +0000 (0:00:00.502)       0:14:33.945 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ipmi_exporter : Deploy service] **********************\nSunday 01 March 2026  21:19:48 +0000 (0:00:02.173)       0:14:36.118 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:19:49 +0000 (0:00:00.803)       0:14:36.921 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:19:49 +0000 (0:00:00.058)       0:14:36.980 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.prometheus_pushgateway : Deploy Helm chart] **********\nSunday 01 March 2026  21:19:49 +0000 (0:00:00.584)       0:14:37.565 **********\nchanged: [instance]\n\nPLAY [Deploy OpenStack] ********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:19:51 +0000 (0:00:01.632)       0:14:39.198 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:19:53 +0000 (0:00:02.108)       0:14:41.307 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:19:53 +0000 (0:00:00.074)       0:14:41.381 
**********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:19:53 +0000 (0:00:00.050)       0:14:41.431 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:19:53 +0000 (0:00:00.053)       0:14:41.485 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:19:53 +0000 (0:00:00.055)       0:14:41.540 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:19:53 +0000 (0:00:00.054)       0:14:41.594 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:19:53 +0000 (0:00:00.064)       0:14:41.658 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:19:53 +0000 (0:00:00.062)       0:14:41.721 **********\nok: [instance] => (item=oslo_cache)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:19:54 +0000 (0:00:00.074)       0:14:41.795 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:19:54 +0000 (0:00:00.072)       0:14:41.868 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:19:54 +0000 (0:00:00.075)       0:14:41.944 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.memcached : Deploy Helm chart] 
***********************\nSunday 01 March 2026  21:19:54 +0000 (0:00:00.540)       0:14:42.484 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.memcached : Apply manifests for monitoring] **********\nSunday 01 March 2026  21:19:55 +0000 (0:00:01.237)       0:14:43.722 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************\nSunday 01 March 2026  21:19:56 +0000 (0:00:00.999)       0:14:44.721 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****\nSunday 01 March 2026  21:20:02 +0000 (0:00:05.350)       0:14:50.072 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********\nSunday 01 March 2026  21:20:02 +0000 (0:00:00.231)       0:14:50.304 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:20:03 +0000 (0:00:00.541)       0:14:50.846 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:20:03 +0000 (0:00:00.089)       0:14:50.935 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:20:03 +0000 (0:00:00.093)       0:14:51.029 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:20:04 +0000 (0:00:00.805)       0:14:51.835 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:20:04 +0000 (0:00:00.072)       0:14:51.908 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:20:04 +0000 (0:00:00.058)       0:14:51.966 
**********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:20:04 +0000 (0:00:00.053)       0:14:52.019 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:21:05 +0000 (0:01:00.977)       0:15:52.997 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:21:05 +0000 (0:00:00.666)       0:15:53.664 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:21:05 +0000 (0:00:00.065)       0:15:53.729 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:21:06 +0000 (0:00:00.060)       0:15:53.789 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:21:06 +0000 (0:00:00.055)       0:15:53.845 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:21:06 +0000 (0:00:00.052)       0:15:53.898 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:21:06 +0000 (0:00:00.092)       0:15:53.990 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:21:06 +0000 (0:00:00.083)       0:15:54.074 **********\nincluded: 
/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:21:06 +0000 (0:00:00.073)       0:15:54.147 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Create Keycloak realms] *******************\nSunday 01 March 2026  21:21:06 +0000 (0:00:00.538)       0:15:54.685 **********\nok: [instance] => (item=None)\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Setup Keycloak Authentication Required Actions (MFA)] ***\nSunday 01 March 2026  21:21:08 +0000 (0:00:01.449)       0:15:56.135 **********\nok: [instance] => (item=atmosphere)\n\nTASK [vexxhost.atmosphere.keystone : Create ConfigMap with all OpenID connect configurations] ***\nSunday 01 March 2026  21:21:09 +0000 (0:00:01.276)       0:15:57.412 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Create Keycloak clients] ******************\nSunday 01 March 2026  21:21:10 +0000 (0:00:00.917)       0:15:58.330 **********\nchanged: [instance] => (item=None)\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Assign realm-management roles to service account] ***\nSunday 01 March 2026  21:21:11 +0000 (0:00:01.199)       0:15:59.530 **********\nchanged: [instance] => (item=None)\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Deploy Helm chart] ************************\nSunday 01 March 2026  21:21:13 +0000 (0:00:01.713)       0:16:01.243 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:23:52 +0000 (0:02:39.018)       0:18:40.262 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:23:52 +0000 (0:00:00.110)       0:18:40.372 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:23:52 +0000 (0:00:00.065)       0:18:40.438 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:23:52 +0000 (0:00:00.062)       0:18:40.500 **********\nok: [instance]\n\nTASK [Create Ingress identity] *************************************************\nSunday 01 March 2026  21:23:52 +0000 (0:00:00.071)       0:18:40.572 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress identity] *******************\nSunday 01 March 2026  21:23:52 +0000 (0:00:00.080)       0:18:40.652 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Validate if ingress is reachable] *********\nSunday 01 March 2026  21:23:53 +0000 (0:00:01.011)       0:18:41.664 **********\nFAILED - RETRYING: [instance]: Validate if ingress is reachable (120 retries left).\nFAILED - RETRYING: [instance]: Validate if ingress is reachable (119 retries left).\nFAILED - RETRYING: [instance]: Validate if ingress is reachable (118 retries left).\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Wait until identity service ready] ********\nSunday 01 March 2026  21:23:58 +0000 (0:00:04.407)       0:18:46.071 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Create Keystone domains] ******************\nSunday 01 March 2026  21:23:59 +0000 (0:00:00.716)       0:18:46.788 **********\nok: [instance] => (item=atmosphere)\n\nTASK [vexxhost.atmosphere.keystone : Create Keystone identity providers] *******\nSunday 01 March 2026  21:24:00 +0000 (0:00:01.261)       0:18:48.049 **********\nchanged: [instance] => (item=atmosphere)\n\nTASK [vexxhost.atmosphere.keystone : Create Keystone federation mappings] ******\nSunday 01 March 2026  21:24:01 +0000 (0:00:01.185)       0:18:49.235 **********\nchanged: [instance] => (item=atmosphere)\n\nTASK 
[vexxhost.atmosphere.keystone : Create Keystone federation protocols] *****\nSunday 01 March 2026  21:24:02 +0000 (0:00:01.199)       0:18:50.435 **********\nchanged: [instance] => (item=atmosphere)\n\nTASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********\nSunday 01 March 2026  21:24:03 +0000 (0:00:01.259)       0:18:51.695 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:24:04 +0000 (0:00:00.238)       0:18:51.934 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  21:24:05 +0000 (0:00:01.188)       0:18:53.122 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:24:05 +0000 (0:00:00.270)       0:18:53.393 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:24:05 +0000 (0:00:00.086)       0:18:53.480 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:24:06 +0000 (0:00:00.518)       0:18:53.998 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************\nSunday 01 March 2026  21:24:07 +0000 (0:00:01.280)       0:18:55.278 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***\nSunday 01 March 2026  21:24:08 +0000 (0:00:00.538)       0:18:55.817 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***\nSunday 01 March 2026  21:24:08 +0000 (0:00:00.773)       0:18:56.590 **********\nok: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***\nSunday 01 March 2026  21:24:09 +0000 (0:00:00.777)       0:18:57.368 **********\nok: [instance]\n\nTASK [Generate OpenStack-Helm endpoints] ***************************************\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.373)       0:18:57.741 **********\nincluded: openstack_helm_endpoints for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.124)       0:18:57.866 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.062)       0:18:57.928 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.052)       0:18:57.981 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.049)       0:18:58.031 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.050)       0:18:58.081 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.053)       0:18:58.134 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.058)       0:18:58.192 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.063)       0:18:58.256 **********\nok: [instance] => 
(item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.070)       0:18:58.327 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************\nSunday 01 March 2026  21:24:10 +0000 (0:00:00.072)       0:18:58.400 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********\nSunday 01 March 2026  21:24:11 +0000 (0:00:00.561)       0:18:58.962 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:24:12 +0000 (0:00:00.795)       0:18:59.757 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:24:12 +0000 (0:00:00.089)       0:18:59.846 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:24:12 +0000 (0:00:00.092)       0:18:59.939 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:24:12 +0000 (0:00:00.737)       0:19:00.676 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:24:13 +0000 (0:00:00.059)       0:19:00.736 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:24:13 +0000 (0:00:00.054)       0:19:00.790 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:24:13 +0000 (0:00:00.052)       0:19:00.842 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] 
***\nSunday 01 March 2026  21:24:44 +0000 (0:00:30.921)       0:19:31.764 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:24:44 +0000 (0:00:00.702)       0:19:32.466 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:24:44 +0000 (0:00:00.066)       0:19:32.533 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:24:44 +0000 (0:00:00.060)       0:19:32.593 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:24:44 +0000 (0:00:00.060)       0:19:32.653 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:24:44 +0000 (0:00:00.059)       0:19:32.713 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=identity)\nok: [instance] => (item=key_manager)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:24:45 +0000 (0:00:00.122)       0:19:32.835 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:24:45 +0000 (0:00:00.074)       0:19:32.909 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:24:45 +0000 (0:00:00.067)       0:19:32.977 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.barbican : Deploy 
Helm chart] ************************\nSunday 01 March 2026  21:24:45 +0000 (0:00:00.562)       0:19:33.539 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:26:16 +0000 (0:01:31.044)       0:21:04.584 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:26:16 +0000 (0:00:00.118)       0:21:04.703 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:26:17 +0000 (0:00:00.066)       0:21:04.769 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:26:17 +0000 (0:00:00.061)       0:21:04.831 **********\nok: [instance]\n\nTASK [Create Ingress key-manager] **********************************************\nSunday 01 March 2026  21:26:17 +0000 (0:00:00.079)       0:21:04.911 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress key-manager] ****************\nSunday 01 March 2026  21:26:17 +0000 (0:00:00.082)       0:21:04.993 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.barbican : Create creator role] **********************\nSunday 01 March 2026  21:26:18 +0000 (0:00:01.054)       0:21:06.047 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.barbican : Add implied roles] ************************\nSunday 01 March 2026  21:26:19 +0000 (0:00:01.217)       0:21:07.265 **********\nchanged: [instance] => (item={'role': 'member', 'implies': 'creator'})\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:26:26 +0000 (0:00:07.254)       0:21:14.520 **********\nincluded: 
/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:26:26 +0000 (0:00:00.074)       0:21:14.594 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph : Deploy Helm chart] ***********************\nSunday 01 March 2026  21:26:27 +0000 (0:00:00.534)       0:21:15.129 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:26:30 +0000 (0:00:02.706)       0:21:17.835 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.068)       0:21:17.904 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.052)       0:21:17.956 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.059)       0:21:18.015 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.053)       0:21:18.069 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.058)       0:21:18.128 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.051)       0:21:18.180 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] 
***\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.069)       0:21:18.250 **********\nok: [instance] => (item=rook_ceph_cluster)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.070)       0:21:18.321 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.072)       0:21:18.394 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:26:30 +0000 (0:00:00.073)       0:21:18.468 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Set mgr/cephadm/warn_on_stray_daemons to false] ***\nSunday 01 March 2026  21:26:31 +0000 (0:00:00.517)       0:21:18.985 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Collect \"ceph quorum_status\" output from a monitor] ***\nSunday 01 March 2026  21:26:58 +0000 (0:00:26.849)       0:21:45.835 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for client.admin] ***\nSunday 01 March 2026  21:27:12 +0000 (0:00:14.652)       0:22:00.488 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for monitors] ***\nSunday 01 March 2026  21:27:14 +0000 (0:00:01.560)       0:22:02.048 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Create Ceph cluster resource] ****\nSunday 01 March 2026  21:27:15 +0000 (0:00:01.259)       0:22:03.308 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Deploy Helm chart] ***************\nSunday 01 March 2026  21:27:16 +0000 (0:00:00.783)       0:22:04.091 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster 
: Create OpenStack user] ***********\nSunday 01 March 2026  21:27:19 +0000 (0:00:02.749)       0:22:06.841 **********\n[WARNING]: Module did not set no_log for update_password\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Grant access to \"service\" project] ***\nSunday 01 March 2026  21:27:21 +0000 (0:00:01.911)       0:22:08.752 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack service] ********\nSunday 01 March 2026  21:27:27 +0000 (0:00:06.930)       0:22:15.683 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack endpoints] ******\nSunday 01 March 2026  21:27:29 +0000 (0:00:01.151)       0:22:16.834 **********\nchanged: [instance] => (item={'interface': 'public', 'url': 'https://object-store.199-204-45-156.nip.io/swift/v1/%(tenant_id)s'})\nchanged: [instance] => (item={'interface': 'internal', 'url': 'http://rook-ceph-rgw-ceph.openstack.svc.cluster.local/swift/v1/%(tenant_id)s'})\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:27:31 +0000 (0:00:02.083)       0:22:18.918 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:27:31 +0000 (0:00:00.114)       0:22:19.033 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:27:31 +0000 (0:00:00.052)       0:22:19.085 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:27:31 +0000 (0:00:00.056)       0:22:19.141 **********\nok: [instance]\n\nTASK [Create Ingress rook-ceph-cluster] ****************************************\nSunday 01 March 2026  21:27:31 +0000 (0:00:00.076)       0:22:19.218 **********\nincluded: ingress for instance\n\nTASK 
[vexxhost.atmosphere.ingress : Create Ingress rook-ceph-cluster] **********\nSunday 01 March 2026  21:27:31 +0000 (0:00:00.089)       0:22:19.307 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:27:32 +0000 (0:00:01.004)       0:22:20.312 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:27:32 +0000 (0:00:00.067)       0:22:20.379 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Collect \"ceph mon dump\" output from a monitor] ***\nSunday 01 March 2026  21:27:33 +0000 (0:00:00.561)       0:22:20.941 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Generate fact with list of Ceph monitors] ***\nSunday 01 March 2026  21:27:47 +0000 (0:00:14.654)       0:22:35.596 **********\nok: [instance] => (item=10.96.240.200)\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph service] *************\nSunday 01 March 2026  21:27:47 +0000 (0:00:00.079)       0:22:35.675 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph endpoints] ***********\nSunday 01 March 2026  21:27:48 +0000 (0:00:00.710)       0:22:36.385 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Retrieve client.admin keyring] ***\nSunday 01 March 2026  21:27:49 +0000 (0:00:00.768)       0:22:37.154 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Parse client.admin keyring] ******\nSunday 01 March 2026  21:27:50 +0000 (0:00:01.324)       0:22:38.478 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Create \"pvc-ceph-client-key\" secret] ***\nSunday 01 March 2026  21:27:50 +0000 (0:00:00.068)       0:22:38.547 
**********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Deploy Helm chart] ***************\nSunday 01 March 2026  21:27:51 +0000 (0:00:00.711)       0:22:39.259 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:27:52 +0000 (0:00:01.283)       0:22:40.543 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:27:53 +0000 (0:00:00.218)       0:22:40.761 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:27:53 +0000 (0:00:00.086)       0:22:40.848 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:27:53 +0000 (0:00:00.689)       0:22:41.537 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:27:53 +0000 (0:00:00.169)       0:22:41.707 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:27:54 +0000 (0:00:00.054)       0:22:41.762 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:27:54 +0000 (0:00:00.060)       0:22:41.822 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:28:24 +0000 (0:00:30.866)       0:23:12.689 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:28:25 +0000 (0:00:00.716)       0:23:13.405 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : 
Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:28:25 +0000 (0:00:00.073)       0:23:13.478 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:28:25 +0000 (0:00:00.061)       0:23:13.540 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:28:25 +0000 (0:00:00.066)       0:23:13.606 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:28:25 +0000 (0:00:00.063)       0:23:13.670 **********\nok: [instance] => (item=image)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=identity)\nok: [instance] => (item=dashboard)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:28:26 +0000 (0:00:00.131)       0:23:13.802 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:28:26 +0000 (0:00:00.083)       0:23:13.885 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:28:26 +0000 (0:00:00.085)       0:23:13.970 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance : Deploy Helm chart] **************************\nSunday 01 March 2026  21:28:26 +0000 (0:00:00.544)       0:23:14.515 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:30:33 +0000 (0:02:06.719)       0:25:21.234 **********\nincluded: 
openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:30:33 +0000 (0:00:00.117)       0:25:21.352 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:30:33 +0000 (0:00:00.050)       0:25:21.403 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:30:33 +0000 (0:00:00.059)       0:25:21.462 **********\nok: [instance]\n\nTASK [Create Ingress image] ****************************************************\nSunday 01 March 2026  21:30:33 +0000 (0:00:00.075)       0:25:21.538 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress image] **********************\nSunday 01 March 2026  21:30:33 +0000 (0:00:00.077)       0:25:21.616 **********\nchanged: [instance]\n\nTASK [Create images] ***********************************************************\nSunday 01 March 2026  21:30:34 +0000 (0:00:01.031)       0:25:22.647 **********\nincluded: glance_image for instance => (item={'container_format': 'bare', 'disk_format': 'raw', 'is_public': True, 'min_disk': 1, 'name': 'cirros', 'url': 'http://download.cirros-cloud.net/0.6.2/cirros-0.6.2-x86_64-disk.img'})\n\nTASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************\nSunday 01 March 2026  21:30:35 +0000 (0:00:00.132)       0:25:22.780 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  21:30:38 +0000 (0:00:03.180)       0:25:25.960 **********\nFAILED - RETRYING: [instance]: Check if image exists (120 retries left).\nFAILED - RETRYING: [instance]: Check if image exists (119 retries left).\nFAILED - RETRYING: [instance]: Check if image exists (118 retries left).\nFAILED - RETRYING: [instance]: Check if 
image exists (117 retries left).\nFAILED - RETRYING: [instance]: Check if image exists (116 retries left).\nFAILED - RETRYING: [instance]: Check if image exists (115 retries left).\nFAILED - RETRYING: [instance]: Check if image exists (114 retries left).\nFAILED - RETRYING: [instance]: Check if image exists (113 retries left).\nFAILED - RETRYING: [instance]: Check if image exists (112 retries left).\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  21:30:56 +0000 (0:00:18.553)       0:25:44.514 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 01 March 2026  21:30:57 +0000 (0:00:00.224)       0:25:44.739 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  21:30:57 +0000 (0:00:00.782)       0:25:45.522 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***\nSunday 01 March 2026  21:30:58 +0000 (0:00:00.239)       0:25:45.761 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  21:30:58 +0000 (0:00:00.316)       0:25:46.078 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  21:30:59 +0000 (0:00:00.725)       0:25:46.804 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  21:31:04 +0000 (0:00:05.543)       0:25:52.347 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:31:04 +0000 (0:00:00.324)       0:25:52.671 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] 
*************************************************\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.078)       0:25:52.749 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.053)       0:25:52.803 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.056)       0:25:52.859 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.057)       0:25:52.917 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.063)       0:25:52.980 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.059)       0:25:53.039 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.060)       0:25:53.100 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=staffeln)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.102)       0:25:53.203 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.083)       0:25:53.286 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK 
[vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:31:05 +0000 (0:00:00.078)       0:25:53.365 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.staffeln : Deploy Helm chart] ************************\nSunday 01 March 2026  21:31:06 +0000 (0:00:00.557)       0:25:53.922 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:31:27 +0000 (0:00:20.890)       0:26:14.813 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:31:27 +0000 (0:00:00.099)       0:26:14.912 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:31:27 +0000 (0:00:00.089)       0:26:15.002 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:31:27 +0000 (0:00:00.722)       0:26:15.724 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:31:28 +0000 (0:00:00.056)       0:26:15.781 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:31:28 +0000 (0:00:00.061)       0:26:15.842 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:31:28 +0000 (0:00:00.057)       0:26:15.899 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:31:59 +0000 (0:00:30.872)       0:26:46.771 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] 
***\nSunday 01 March 2026  21:31:59 +0000 (0:00:00.668)       0:26:47.440 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:31:59 +0000 (0:00:00.065)       0:26:47.505 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:31:59 +0000 (0:00:00.061)       0:26:47.567 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:31:59 +0000 (0:00:00.063)       0:26:47.630 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:31:59 +0000 (0:00:00.063)       0:26:47.694 **********\nok: [instance] => (item=volumev3)\nok: [instance] => (item=image)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:32:00 +0000 (0:00:00.138)       0:26:47.832 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:32:00 +0000 (0:00:00.071)       0:26:47.904 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:32:00 +0000 (0:00:00.064)       0:26:47.968 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.cinder : Generate Helm values] ***********************\nSunday 01 March 2026  21:32:00 +0000 (0:00:00.554)       0:26:48.523 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cinder : 
Append Helm values (Staffeln)] **************\nSunday 01 March 2026  21:32:01 +0000 (0:00:00.358)       0:26:48.881 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.cinder : Deploy Helm chart] **************************\nSunday 01 March 2026  21:32:01 +0000 (0:00:00.059)       0:26:48.940 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:36:43 +0000 (0:04:42.760)       0:31:31.701 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:36:44 +0000 (0:00:00.104)       0:31:31.806 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:36:44 +0000 (0:00:00.052)       0:31:31.858 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:36:44 +0000 (0:00:00.051)       0:31:31.909 **********\nok: [instance]\n\nTASK [Create Ingress volumev3] *************************************************\nSunday 01 March 2026  21:36:44 +0000 (0:00:00.193)       0:31:32.102 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress volumev3] *******************\nSunday 01 March 2026  21:36:44 +0000 (0:00:00.073)       0:31:32.175 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:36:45 +0000 (0:00:01.001)       0:31:33.177 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:36:45 +0000 (0:00:00.085)       0:31:33.263 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  
21:36:45 +0000 (0:00:00.052)       0:31:33.315 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:36:45 +0000 (0:00:00.050)       0:31:33.365 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:36:45 +0000 (0:00:00.042)       0:31:33.408 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:36:45 +0000 (0:00:00.053)       0:31:33.461 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:36:45 +0000 (0:00:00.051)       0:31:33.513 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:36:45 +0000 (0:00:00.053)       0:31:33.567 **********\nok: [instance] => (item=placement)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:36:45 +0000 (0:00:00.097)       0:31:33.664 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:36:46 +0000 (0:00:00.074)       0:31:33.739 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:36:46 +0000 (0:00:00.076)       0:31:33.815 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.placement : Deploy Helm chart] ***********************\nSunday 01 March 2026  
21:36:46 +0000 (0:00:00.533)       0:31:34.348 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:38:05 +0000 (0:01:18.409)       0:32:52.757 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:38:05 +0000 (0:00:00.101)       0:32:52.859 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:38:05 +0000 (0:00:00.060)       0:32:52.919 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:38:05 +0000 (0:00:00.056)       0:32:52.976 **********\nok: [instance]\n\nTASK [Create Ingress placement] ************************************************\nSunday 01 March 2026  21:38:05 +0000 (0:00:00.067)       0:32:53.044 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress placement] ******************\nSunday 01 March 2026  21:38:05 +0000 (0:00:00.075)       0:32:53.119 **********\nchanged: [instance]\n\nPLAY [Configure operating system] **********************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:38:06 +0000 (0:00:01.000)       0:32:54.120 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Detect if the \"lpfc\" module is loaded] ********\nSunday 01 March 2026  21:38:08 +0000 (0:00:02.613)       0:32:56.734 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Install the configuration file] ***************\nSunday 01 March 2026  21:38:09 +0000 (0:00:00.255)       0:32:56.989 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Get the values for the module parameters] *****\nSunday 01 March 2026  21:38:09 
+0000 (0:00:00.056)       0:32:57.046 **********\nskipping: [instance] => (item=lpfc_lun_queue_depth)\nskipping: [instance] => (item=lpfc_sg_seg_cnt)\nskipping: [instance] => (item=lpfc_max_luns)\nskipping: [instance] => (item=lpfc_enable_fc4_type)\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Detect if the run-time module parameters are set correctly] ***\nSunday 01 March 2026  21:38:09 +0000 (0:00:00.067)       0:32:57.113 **********\nskipping: [instance] => (item=lpfc_lun_queue_depth)\nskipping: [instance] => (item=lpfc_sg_seg_cnt)\nskipping: [instance] => (item=lpfc_max_luns)\nskipping: [instance] => (item=lpfc_enable_fc4_type)\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Update \"initramfs\" if the configuration file has changed] ***\nSunday 01 March 2026  21:38:09 +0000 (0:00:00.063)       0:32:57.177 **********\nskipping: [instance]\n\nTASK [Reboot the system if the configuration file has changed] *****************\nSunday 01 March 2026  21:38:09 +0000 (0:00:00.055)       0:32:57.233 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************\nSunday 01 March 2026  21:38:09 +0000 (0:00:00.077)       0:32:57.310 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********\nSunday 01 March 2026  21:38:13 +0000 (0:00:04.191)       0:33:01.502 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********\nSunday 01 March 2026  21:38:36 +0000 (0:00:22.617)       0:33:24.119 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.iscsi : Ensure iscsid is started] ********************\nSunday 01 March 2026  21:38:36 +0000 (0:00:00.555)       0:33:24.674 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.udev : Add udev rules for Pure Storage FlashArray] ***\nSunday 01 March 2026  21:38:37 +0000 (0:00:00.449)       0:33:25.124 **********\nchanged: 
[instance]\n\nTASK [vexxhost.atmosphere.udev : Add udev rules for SCSI Unit Attention] *******\nSunday 01 March 2026  21:38:37 +0000 (0:00:00.485)       0:33:25.609 **********\nchanged: [instance]\n\nRUNNING HANDLER [vexxhost.atmosphere.multipathd : Restart \"multipathd\"] ********\nSunday 01 March 2026  21:38:38 +0000 (0:00:00.533)       0:33:26.142 **********\nchanged: [instance]\n\nRUNNING HANDLER [vexxhost.atmosphere.udev : Reload udev] ***********************\nSunday 01 March 2026  21:38:38 +0000 (0:00:00.486)       0:33:26.629 **********\nok: [instance]\n\nPLAY [Deploy SDN] **************************************************************\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.289)       0:33:26.919 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.087)       0:33:27.006 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.054)       0:33:27.061 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.050)       0:33:27.112 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.052)       0:33:27.165 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.213)       0:33:27.378 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.063)     
  0:33:27.442 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.058)       0:33:27.500 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.052)       0:33:27.553 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.086)       0:33:27.639 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:38:39 +0000 (0:00:00.077)       0:33:27.716 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openvswitch : Get the current status of all systemd values for containerd] ***\nSunday 01 March 2026  21:38:40 +0000 (0:00:00.542)       0:33:28.259 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openvswitch : Assert that LimitMEMLOCK is set to infinity] ***\nSunday 01 March 2026  21:38:40 +0000 (0:00:00.231)       0:33:28.490 **********\nok: [instance] => {\n    \"changed\": false,\n    \"msg\": \"All assertions passed\"\n}\n\nTASK [vexxhost.atmosphere.openvswitch : Pull openvswitch image] ****************\nSunday 01 March 2026  21:38:40 +0000 (0:00:00.062)       0:33:28.552 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openvswitch : Verify openvswitch image pull] *********\nSunday 01 March 2026  21:38:47 +0000 (0:00:06.685)       0:33:35.237 **********\nok: [instance] => {\n    \"changed\": false,\n    \"msg\": \"Successfully pulled openvswitch image\"\n}\n\nTASK [vexxhost.atmosphere.openvswitch : Deploy Helm chart] *********************\nSunday 01 March 2026  21:38:47 +0000 (0:00:00.056)       0:33:35.294 
**********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:38:48 +0000 (0:00:01.414)       0:33:36.709 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:38:49 +0000 (0:00:00.067)       0:33:36.776 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Check if ovn_controller DaemonSet exists] ******\nSunday 01 March 2026  21:38:49 +0000 (0:00:00.542)       0:33:37.319 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Delete existing ovn controller DaemonSet if type label is found] ***\nSunday 01 March 2026  21:38:50 +0000 (0:00:01.222)       0:33:38.541 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Pull ovn-controller image] *********************\nSunday 01 March 2026  21:38:50 +0000 (0:00:00.067)       0:33:38.608 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Verify ovn-controller image pull] **************\nSunday 01 March 2026  21:38:56 +0000 (0:00:05.854)       0:33:44.463 **********\nok: [instance] => {\n    \"changed\": false,\n    \"msg\": \"Successfully pulled ovn-controller image\"\n}\n\nTASK [vexxhost.atmosphere.ovn : Deploy Helm chart] *****************************\nSunday 01 March 2026  21:38:56 +0000 (0:00:00.059)       0:33:44.523 **********\nchanged: [instance]\n\nPLAY [controllers[0]] **********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  21:38:58 +0000 (0:00:01.914)       0:33:46.438 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:39:01 +0000 (0:00:02.831)       0:33:49.270 
**********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:39:01 +0000 (0:00:00.088)       0:33:49.358 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:39:01 +0000 (0:00:00.060)       0:33:49.419 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:39:01 +0000 (0:00:00.077)       0:33:49.496 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:39:01 +0000 (0:00:00.075)       0:33:49.571 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:39:01 +0000 (0:00:00.065)       0:33:49.637 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:39:01 +0000 (0:00:00.064)       0:33:49.701 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:39:02 +0000 (0:00:00.074)       0:33:49.775 **********\nok: [instance] => (item=libvirt_exporter)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:39:02 +0000 (0:00:00.073)       0:33:49.849 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:39:02 +0000 (0:00:00.086)       0:33:49.936 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm 
chart] ***************\nSunday 01 March 2026  21:39:02 +0000 (0:00:00.090)       0:33:50.026 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.libvirt : Create CA certificates] ********************\nSunday 01 March 2026  21:39:02 +0000 (0:00:00.571)       0:33:50.598 **********\nchanged: [instance] => (item=libvirt-vnc)\nchanged: [instance] => (item=libvirt-api)\n\nTASK [vexxhost.atmosphere.libvirt : Create Issuers] ****************************\nSunday 01 March 2026  21:39:04 +0000 (0:00:01.555)       0:33:52.154 **********\nchanged: [instance] => (item=libvirt-vnc)\nchanged: [instance] => (item=libvirt-api)\n\nTASK [vexxhost.atmosphere.libvirt : Deploy Helm chart] *************************\nSunday 01 March 2026  21:39:05 +0000 (0:00:01.395)       0:33:53.549 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:39:07 +0000 (0:00:01.861)       0:33:55.411 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:39:07 +0000 (0:00:00.086)       0:33:55.497 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.coredns : Deploy Helm chart] *************************\nSunday 01 March 2026  21:39:08 +0000 (0:00:00.533)       0:33:56.031 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************\nSunday 01 March 2026  21:39:08 +0000 (0:00:00.097)       0:33:56.128 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****\nSunday 01 March 2026  21:39:09 +0000 (0:00:01.007)       0:33:57.136 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********\nSunday 01 March 2026  21:39:09 +0000 (0:00:00.242) 
      0:33:57.378 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:39:10 +0000 (0:00:00.534)       0:33:57.913 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:39:10 +0000 (0:00:00.114)       0:33:58.027 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:39:10 +0000 (0:00:00.103)       0:33:58.130 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:39:11 +0000 (0:00:00.777)       0:33:58.908 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:39:11 +0000 (0:00:00.065)       0:33:58.973 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:39:11 +0000 (0:00:00.065)       0:33:59.039 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:39:11 +0000 (0:00:00.072)       0:33:59.112 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:39:42 +0000 (0:00:30.914)       0:34:30.026 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.771)       0:34:30.798 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.069)       0:34:30.867 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.060)       0:34:30.928 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.068)       0:34:30.996 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.074)       0:34:31.070 **********\nok: [instance] => (item=volumev3)\nok: [instance] => (item=oslo_db_cell0)\nok: [instance] => (item=image)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=oslo_db_api)\nok: [instance] => (item=network)\nok: [instance] => (item=compute_metadata)\nok: [instance] => (item=placement)\nok: [instance] => (item=identity)\nok: [instance] => (item=compute)\nok: [instance] => (item=baremetal)\nok: [instance] => (item=compute_novnc_proxy)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.237)       0:34:31.307 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.090)       0:34:31.398 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:39:43 +0000 (0:00:00.079)       0:34:31.478 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.nova : Generate temporary file for SSH public key] ***\nSunday 01 March 2026  21:39:44 +0000 (0:00:00.593)       0:34:32.071 **********\nok: [instance -> localhost]\n\nTASK 
[vexxhost.atmosphere.nova : Write contents of current private SSH key] ****\nSunday 01 March 2026  21:39:44 +0000 (0:00:00.207)       0:34:32.279 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.nova : Generate public key for SSH private key] ******\nSunday 01 March 2026  21:39:44 +0000 (0:00:00.439)       0:34:32.718 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.nova : Delete temporary file for public SSH key] *****\nSunday 01 March 2026  21:39:45 +0000 (0:00:00.599)       0:34:33.318 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.nova : Remove nova-bootstrap and nova-cell-setup job] ***\nSunday 01 March 2026  21:39:45 +0000 (0:00:00.215)       0:34:33.533 **********\nok: [instance] => (item=nova-bootstrap)\nok: [instance] => (item=nova-cell-setup)\n\nTASK [vexxhost.atmosphere.nova : Deploy Helm chart] ****************************\nSunday 01 March 2026  21:39:47 +0000 (0:00:01.423)       0:34:34.957 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:42:22 +0000 (0:02:34.791)       0:37:09.748 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:42:22 +0000 (0:00:00.145)       0:37:09.894 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:42:22 +0000 (0:00:00.071)       0:37:09.965 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:42:22 +0000 (0:00:00.063)       0:37:10.029 **********\nok: [instance]\n\nTASK [Create Ingress compute] **************************************************\nSunday 01 March 2026  21:42:22 +0000 (0:00:00.077)       0:37:10.107 **********\nincluded: ingress for instance\n\nTASK 
[vexxhost.atmosphere.ingress : Create Ingress compute] ********************\nSunday 01 March 2026  21:42:22 +0000 (0:00:00.106)       0:37:10.213 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:42:23 +0000 (0:00:01.279)       0:37:11.493 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:42:23 +0000 (0:00:00.127)       0:37:11.620 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:42:23 +0000 (0:00:00.071)       0:37:11.691 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:42:24 +0000 (0:00:00.063)       0:37:11.755 **********\nok: [instance]\n\nTASK [Create Ingress compute-novnc-proxy] **************************************\nSunday 01 March 2026  21:42:24 +0000 (0:00:00.097)       0:37:11.853 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress compute-novnc-proxy] ********\nSunday 01 March 2026  21:42:24 +0000 (0:00:00.096)       0:37:11.950 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.nova : Wait until compute api service ready] *********\nSunday 01 March 2026  21:42:25 +0000 (0:00:01.055)       0:37:13.005 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.nova : Create flavors] *******************************\nSunday 01 March 2026  21:42:25 +0000 (0:00:00.715)       0:37:13.721 **********\nFAILED - RETRYING: [instance]: Create flavors (60 retries left).\nFAILED - RETRYING: [instance]: Create flavors (59 retries left).\nchanged: [instance] => (item={'disk': 1, 'name': 'm1.tiny', 'ram': 512, 'vcpus': 1})\nchanged: [instance] => (item={'disk': 20, 'name': 'm1.small', 'ram': 2048, 'vcpus': 
1})\nchanged: [instance] => (item={'disk': 40, 'name': 'm1.medium', 'ram': 4096, 'vcpus': 2})\nchanged: [instance] => (item={'disk': 80, 'name': 'm1.large', 'ram': 8192, 'vcpus': 4})\nchanged: [instance] => (item={'disk': 160, 'name': 'm1.xlarge', 'ram': 16384, 'vcpus': 8})\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:42:43 +0000 (0:00:17.554)       0:37:31.276 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:42:43 +0000 (0:00:00.124)       0:37:31.400 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:42:43 +0000 (0:00:00.100)       0:37:31.500 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:42:44 +0000 (0:00:00.714)       0:37:32.215 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:42:44 +0000 (0:00:00.061)       0:37:32.276 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:42:44 +0000 (0:00:00.062)       0:37:32.339 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:42:44 +0000 (0:00:00.061)       0:37:32.400 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:43:15 +0000 (0:00:30.907)       0:38:03.308 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:43:16 +0000 (0:00:00.754)       0:38:04.062 **********\nok: 
[instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:43:16 +0000 (0:00:00.072)       0:38:04.135 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:43:16 +0000 (0:00:00.081)       0:38:04.217 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:43:16 +0000 (0:00:00.071)       0:38:04.289 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:43:16 +0000 (0:00:00.065)       0:38:04.354 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=network)\nok: [instance] => (item=dns)\nok: [instance] => (item=load_balancer)\nok: [instance] => (item=compute_metadata)\nok: [instance] => (item=identity)\nok: [instance] => (item=compute)\nok: [instance] => (item=baremetal)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:43:16 +0000 (0:00:00.204)       0:38:04.558 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:43:16 +0000 (0:00:00.087)       0:38:04.646 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:43:17 +0000 (0:00:00.096)       0:38:04.743 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Set external_dns_driver] *******************\nSunday 01 March 2026  21:43:17 +0000 (0:00:00.583)       0:38:05.326 
**********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Generate Helm values] **********************\nSunday 01 March 2026  21:43:17 +0000 (0:00:00.066)       0:38:05.393 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Append Helm values] ************************\nSunday 01 March 2026  21:43:17 +0000 (0:00:00.309)       0:38:05.703 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Append Helm values (neutron_policy_server)] ***\nSunday 01 March 2026  21:43:18 +0000 (0:00:00.081)       0:38:05.785 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Deploy Helm chart] *************************\nSunday 01 March 2026  21:43:18 +0000 (0:00:00.082)       0:38:05.867 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:45:22 +0000 (0:02:04.803)       0:40:10.670 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:45:23 +0000 (0:00:00.136)       0:40:10.807 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:45:23 +0000 (0:00:00.070)       0:40:10.877 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:45:23 +0000 (0:00:00.072)       0:40:10.949 **********\nok: [instance]\n\nTASK [Create Ingress network] **************************************************\nSunday 01 March 2026  21:45:23 +0000 (0:00:00.076)       0:40:11.026 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress network] ********************\nSunday 01 March 2026  21:45:23 +0000 (0:00:00.105)       0:40:11.131 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Wait until network service 
ready] **********\nSunday 01 March 2026  21:45:24 +0000 (0:00:01.201)       0:40:12.332 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Create networks] ***************************\nSunday 01 March 2026  21:45:25 +0000 (0:00:00.800)       0:40:13.132 **********\nFAILED - RETRYING: [instance]: Create networks (60 retries left).\nFAILED - RETRYING: [instance]: Create networks (59 retries left).\nchanged: [instance] => (item={'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True, 'subnets': [{'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}]})\n\nTASK [vexxhost.atmosphere.neutron : Create subnets] ****************************\nSunday 01 March 2026  21:45:39 +0000 (0:00:14.575)       0:40:27.707 **********\nchanged: [instance] => (item=[{'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True}, {'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}])\n\nTASK [vexxhost.atmosphere.senlin : Remove OpenStack endpoints] *****************\nSunday 01 March 2026  21:45:42 +0000 (0:00:02.214)       0:40:29.922 **********\nok: [instance] => (item=public)\nok: [instance] => (item=admin)\nok: [instance] => (item=internal)\n\nTASK [vexxhost.atmosphere.senlin : Remove OpenStack service] *******************\nSunday 01 March 2026  21:45:45 +0000 (0:00:02.852)       0:40:32.774 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.senlin : Remove Ingress] *****************************\nSunday 01 March 2026  21:45:45 +0000 (0:00:00.882)       0:40:33.656 **********\nok: 
[instance]\n\nTASK [vexxhost.atmosphere.senlin : Remove Helm chart] **************************\nSunday 01 March 2026  21:45:46 +0000 (0:00:00.693)       0:40:34.350 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.senlin : Remove OpenStack user] **********************\nSunday 01 March 2026  21:45:47 +0000 (0:00:00.602)       0:40:34.952 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:45:48 +0000 (0:00:01.188)       0:40:36.140 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:45:48 +0000 (0:00:00.123)       0:40:36.264 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:45:48 +0000 (0:00:00.102)       0:40:36.367 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:45:49 +0000 (0:00:00.797)       0:40:37.165 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:45:49 +0000 (0:00:00.069)       0:40:37.234 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:45:49 +0000 (0:00:00.071)       0:40:37.305 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:45:49 +0000 (0:00:00.068)       0:40:37.374 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:46:20 +0000 (0:00:30.946)       0:41:08.320 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster 
credentials] ***\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.787)       0:41:09.107 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.070)       0:41:09.178 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.087)       0:41:09.265 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.070)       0:41:09.336 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.059)       0:41:09.396 **********\nok: [instance] => (item=cloudformation)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=identity)\nok: [instance] => (item=orchestration)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.122)       0:41:09.518 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.078)       0:41:09.597 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:46:21 +0000 (0:00:00.086)       0:41:09.683 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.heat : Deploy Helm chart] ****************************\nSunday 01 March 2026  21:46:22 +0000 (0:00:00.596)       0:41:10.279 **********\nchanged: [instance]\n\nTASK 
[Create Ingress] **********************************************************\nSunday 01 March 2026  21:49:27 +0000 (0:03:04.817)       0:44:15.097 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:49:27 +0000 (0:00:00.130)       0:44:15.227 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:49:27 +0000 (0:00:00.073)       0:44:15.301 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:49:27 +0000 (0:00:00.076)       0:44:15.377 **********\nok: [instance]\n\nTASK [Create Ingress orchestration] ********************************************\nSunday 01 March 2026  21:49:27 +0000 (0:00:00.078)       0:44:15.456 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress orchestration] **************\nSunday 01 March 2026  21:49:27 +0000 (0:00:00.115)       0:44:15.571 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:49:28 +0000 (0:00:01.082)       0:44:16.654 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:49:29 +0000 (0:00:00.124)       0:44:16.779 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:49:29 +0000 (0:00:00.066)       0:44:16.845 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:49:29 +0000 (0:00:00.072)       0:44:16.917 **********\nok: [instance]\n\nTASK [Create Ingress cloudformation] 
*******************************************\nSunday 01 March 2026  21:49:29 +0000 (0:00:00.089)       0:44:17.007 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress cloudformation] *************\nSunday 01 March 2026  21:49:29 +0000 (0:00:00.088)       0:44:17.096 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********\nSunday 01 March 2026  21:49:30 +0000 (0:00:01.162)       0:44:18.259 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  21:49:30 +0000 (0:00:00.288)       0:44:18.547 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  21:49:32 +0000 (0:00:01.220)       0:44:19.768 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:49:32 +0000 (0:00:00.269)       0:44:20.037 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:49:32 +0000 (0:00:00.106)       0:44:20.143 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:49:32 +0000 (0:00:00.522)       0:44:20.666 **********\nok: [instance]\n\nTASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************\nSunday 01 March 2026  21:49:35 +0000 (0:00:02.072)       0:44:22.738 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***\nSunday 01 March 2026  21:49:35 +0000 (0:00:00.504)       0:44:23.242 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive 
keyring] ***\nSunday 01 March 2026  21:49:36 +0000 (0:00:00.768)       0:44:24.010 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.800)       0:44:24.811 **********\nok: [instance]\n\nTASK [Generate OpenStack-Helm endpoints] ***************************************\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.406)       0:44:25.218 **********\nincluded: openstack_helm_endpoints for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.165)       0:44:25.383 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.069)       0:44:25.453 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.058)       0:44:25.512 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.063)       0:44:25.576 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.063)       0:44:25.639 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:49:37 +0000 (0:00:00.055)       0:44:25.695 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:49:38 +0000 (0:00:00.057)       0:44:25.752 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] 
***\nSunday 01 March 2026  21:49:38 +0000 (0:00:00.075)       0:44:25.828 **********\nok: [instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:49:38 +0000 (0:00:00.089)       0:44:25.917 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************\nSunday 01 March 2026  21:49:38 +0000 (0:00:00.089)       0:44:26.006 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********\nSunday 01 March 2026  21:49:38 +0000 (0:00:00.474)       0:44:26.481 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:49:39 +0000 (0:00:00.538)       0:44:27.019 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:49:39 +0000 (0:00:00.109)       0:44:27.128 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:49:39 +0000 (0:00:00.110)       0:44:27.239 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:49:40 +0000 (0:00:00.688)       0:44:27.927 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:49:40 +0000 (0:00:00.069)       0:44:27.997 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:49:40 +0000 (0:00:00.068)       0:44:28.066 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:49:40 +0000 (0:00:00.065)       0:44:28.131 **********\nchanged: 
[instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:50:11 +0000 (0:00:30.915)       0:44:59.047 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.725)       0:44:59.772 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.068)       0:44:59.841 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.084)       0:44:59.925 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.071)       0:44:59.997 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.066)       0:45:00.063 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=network)\nok: [instance] => (item=load_balancer)\nok: [instance] => (item=valkey)\nok: [instance] => (item=oslo_db_persistence)\nok: [instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.149)       0:45:00.213 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.094)       0:45:00.308 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK 
[vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:50:12 +0000 (0:00:00.101)       0:45:00.409 **********\nchanged: [instance]\n\nTASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****\nSunday 01 March 2026  21:50:13 +0000 (0:00:00.568)       0:45:00.978 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create management network] *****************\nSunday 01 March 2026  21:50:14 +0000 (0:00:01.365)       0:45:02.343 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create management subnet] ******************\nSunday 01 March 2026  21:50:16 +0000 (0:00:02.371)       0:45:04.714 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create health manager security group] ******\nSunday 01 March 2026  21:50:18 +0000 (0:00:01.907)       0:45:06.622 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create health manager security group rules] ***\nSunday 01 March 2026  21:50:20 +0000 (0:00:01.361)       0:45:07.983 **********\nchanged: [instance] => (item={'protocol': 'udp', 'port': 5555})\nchanged: [instance] => (item={'protocol': 'udp', 'port': 10514})\nchanged: [instance] => (item={'protocol': 'udp', 'port': 20514})\nchanged: [instance] => (item={'protocol': 'tcp', 'port': 10514})\nchanged: [instance] => (item={'protocol': 'tcp', 'port': 20514})\n\nTASK [vexxhost.atmosphere.octavia : Create health manager networking ports] ****\nSunday 01 March 2026  21:50:26 +0000 (0:00:05.891)       0:45:13.875 **********\nchanged: [instance] => (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Discover facts for other controllers] ******\nSunday 01 March 2026  21:50:27 +0000 (0:00:01.817)       0:45:15.692 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Set binding for ports] *********************\nSunday 01 March 2026  21:50:30 +0000 (0:00:02.575)       0:45:18.267 **********\nok: [instance] 
=> (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Get health manager networking ports] *******\nSunday 01 March 2026  21:50:34 +0000 (0:00:04.256)       0:45:22.524 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Set controller_ip_port_list] ***************\nSunday 01 March 2026  21:50:35 +0000 (0:00:01.067)       0:45:23.591 **********\nok: [instance] => (item=octavia-health-manager-port-instance)\n\nTASK [vexxhost.atmosphere.octavia : Create amphora security group] *************\nSunday 01 March 2026  21:50:35 +0000 (0:00:00.095)       0:45:23.687 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create amphora security group rules] *******\nSunday 01 March 2026  21:50:37 +0000 (0:00:01.098)       0:45:24.785 **********\nchanged: [instance] => (item=[22, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 'numa_affinity_policy': None, 'project_id': 'b6357918f73b458f9e03a5667d465a0e', 'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 
'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'DOWN', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:50:34Z', 'revision_number': 3, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': None}}, 'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])\nchanged: [instance] => (item=[9443, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 'numa_affinity_policy': None, 'project_id': 'b6357918f73b458f9e03a5667d465a0e', 
'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'DOWN', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:50:34Z', 'revision_number': 3, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': None}}, 'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])\n\nTASK [vexxhost.atmosphere.octavia : Create amphora flavor] *********************\nSunday 01 March 2026  21:50:39 +0000 (0:00:02.268)       0:45:27.054 **********\nchanged: [instance]\n\nTASK [Upload Amphora image] ****************************************************\nSunday 01 March 2026  21:50:40 +0000 (0:00:01.105)       0:45:28.160 **********\nincluded: glance_image for instance\n\nTASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************\nSunday 01 March 2026  21:50:40 +0000 (0:00:00.180)       0:45:28.340 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  21:50:41 +0000 (0:00:01.350)       0:45:29.690 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  21:50:43 +0000 (0:00:01.399)       0:45:31.090 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 01 March 2026  21:50:43 +0000 (0:00:00.238)       0:45:31.328 
**********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  21:50:59 +0000 (0:00:15.455)       0:45:46.783 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***\nSunday 01 March 2026  21:50:59 +0000 (0:00:00.249)       0:45:47.033 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  21:51:01 +0000 (0:00:02.512)       0:45:49.545 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  21:51:02 +0000 (0:00:00.709)       0:45:50.254 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  21:51:31 +0000 (0:00:29.210)       0:46:19.464 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Get Amphora image information] *************\nSunday 01 March 2026  21:51:32 +0000 (0:00:00.536)       0:46:20.001 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create Amphora SSH key] ********************\nSunday 01 March 2026  21:51:33 +0000 (0:00:00.926)       0:46:20.927 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Grab generated Amphora public key] *********\nSunday 01 March 2026  21:51:38 +0000 (0:00:05.746)       0:46:26.673 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Import Amphora SSH key-pair in OpenStack] ***\nSunday 01 March 2026  21:51:39 +0000 (0:00:00.698)       0:46:27.372 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create CAs & Issuers] **********************\nSunday 01 March 2026  21:51:41 +0000 (0:00:01.375)       0:46:28.748 **********\nchanged: [instance] => (item=octavia-client)\nchanged: [instance] => (item=octavia-server)\n\nTASK [vexxhost.atmosphere.octavia : Create 
certificate for Octavia clients] ****\nSunday 01 March 2026  21:51:42 +0000 (0:00:01.440)       0:46:30.188 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create admin compute quotaset] *************\nSunday 01 March 2026  21:51:43 +0000 (0:00:00.699)       0:46:30.888 **********\n[WARNING]: Loadbalancer service is not supported by your cloud. Ignoring\nloadbalancer quotas.\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Deploy Helm chart] *************************\nSunday 01 March 2026  21:51:45 +0000 (0:00:01.878)       0:46:32.767 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Add implied roles] *************************\nSunday 01 March 2026  21:53:50 +0000 (0:02:05.129)       0:48:37.897 **********\nchanged: [instance] => (item={'role': 'member', 'implies': 'load-balancer_member'})\nchanged: [instance] => (item={'role': 'reader', 'implies': 'load-balancer_observer'})\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:53:57 +0000 (0:00:07.396)       0:48:45.293 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:53:57 +0000 (0:00:00.356)       0:48:45.649 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:53:57 +0000 (0:00:00.070)       0:48:45.719 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:53:58 +0000 (0:00:00.065)       0:48:45.785 **********\nok: [instance]\n\nTASK [Create Ingress load-balancer] ********************************************\nSunday 01 March 2026  21:53:58 +0000 (0:00:00.083)       0:48:45.869 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress load-balancer] 
**************\nSunday 01 March 2026  21:53:58 +0000 (0:00:00.099)       0:48:45.968 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:53:59 +0000 (0:00:01.158)       0:48:47.126 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:53:59 +0000 (0:00:00.103)       0:48:47.229 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:53:59 +0000 (0:00:00.106)       0:48:47.336 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  21:54:00 +0000 (0:00:00.705)       0:48:48.042 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:54:00 +0000 (0:00:00.068)       0:48:48.111 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:54:00 +0000 (0:00:00.067)       0:48:48.178 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:54:00 +0000 (0:00:00.064)       0:48:48.242 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  21:54:31 +0000 (0:00:30.888)       0:49:19.131 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.666)       0:49:19.797 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.073)       
0:49:19.871 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.075)       0:49:19.946 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.076)       0:49:20.023 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.062)       0:49:20.086 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=container_infra)\nok: [instance] => (item=identity)\nok: [instance] => (item=key_manager)\nok: [instance] => (item=orchestration)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.153)       0:49:20.239 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.100)       0:49:20.340 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:54:32 +0000 (0:00:00.100)       0:49:20.440 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  21:54:33 +0000 (0:00:00.582)       0:49:21.023 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cluster-api/releases/download/v1.10.5/clusterctl-linux-amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  21:54:33 +0000 
(0:00:00.082)       0:49:21.105 **********\nchanged: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  21:54:34 +0000 (0:00:01.501)       0:49:22.607 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.clusterctl : Create a configuration file] ************\nSunday 01 March 2026  21:54:34 +0000 (0:00:00.108)       0:49:22.716 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  21:54:35 +0000 (0:00:00.667)       0:49:23.383 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  21:54:35 +0000 (0:00:00.099)       0:49:23.482 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Create build directory] ***\nSunday 01 March 2026  21:54:36 +0000 (0:00:00.564)       0:49:24.047 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Upload Kustomization] ***\nSunday 01 March 2026  21:54:36 +0000 (0:00:00.255)       0:49:24.303 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Generate manifests] ***\nSunday 01 March 2026  21:54:37 +0000 (0:00:00.586)       0:49:24.889 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Apply manifest to cluster] ***\nSunday 01 March 2026  21:54:38 +0000 (0:00:00.978)       0:49:25.868 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Create a folder for the Cluster API providers] ***\nSunday 01 March 2026  21:54:41 +0000 (0:00:03.006)       0:49:28.875 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Copy over all provider configuration to the remote system] 
***\nSunday 01 March 2026  21:54:41 +0000 (0:00:00.244)       0:49:29.119 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Get a list of all Cluster API providers] ***\nSunday 01 March 2026  21:54:43 +0000 (0:00:02.027)       0:49:31.146 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Initialize the management cluster] *****\nSunday 01 March 2026  21:54:44 +0000 (0:00:00.730)       0:49:31.876 **********\nchanged: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Parse provider resources into version mapping] ***\nSunday 01 March 2026  21:54:48 +0000 (0:00:04.371)       0:49:36.248 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Run upgrade if required] ***************\nSunday 01 March 2026  21:54:48 +0000 (0:00:00.064)       0:49:36.312 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Set node selector for Cluster API components] ***\nSunday 01 March 2026  21:54:48 +0000 (0:00:00.071)       0:49:36.383 **********\nchanged: [instance] => (item={'namespace': 'capi-kubeadm-bootstrap-system', 'name': 'capi-kubeadm-bootstrap-controller-manager'})\nchanged: [instance] => (item={'namespace': 'capi-kubeadm-control-plane-system', 'name': 'capi-kubeadm-control-plane-controller-manager'})\nchanged: [instance] => (item={'namespace': 'capi-system', 'name': 'capi-controller-manager'})\nchanged: [instance] => (item={'namespace': 'capo-system', 'name': 'capo-controller-manager'})\n\nTASK [vexxhost.kubernetes.cluster_api : Set default values for imagePullPolicy in kubeadmConfigSpec of CRDs] ***\nSunday 01 March 2026  21:55:46 +0000 (0:00:57.897)       0:50:34.281 **********\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 'path': 
'/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': 
'/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Set CAPO instance creation timeout] ****\nSunday 01 March 2026  21:55:46 +0000 (0:00:00.102)       0:50:34.383 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy Cluster API for Magnum RBAC] *********\nSunday 01 March 2026  21:56:02 +0000 (0:00:16.140)       0:50:50.524 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy Helm chart] **************************\nSunday 01 March 2026  21:56:03 +0000 (0:00:00.756)       0:50:51.280 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy \"magnum-cluster-api-proxy\"] **********\nSunday 01 March 2026  21:58:11 +0000 (0:02:07.857)       0:52:59.138 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  21:58:12 +0000 (0:00:00.814)       0:52:59.953 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:58:12 +0000 (0:00:00.153)       0:53:00.107 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:58:12 +0000 (0:00:00.087)       0:53:00.194 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:58:12 +0000 (0:00:00.077)       0:53:00.272 **********\nok: [instance]\n\nTASK [Create Ingress container-infra] ******************************************\nSunday 01 March 2026  21:58:12 +0000 (0:00:00.088)       0:53:00.361 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress container-infra] 
************\nSunday 01 March 2026  21:58:12 +0000 (0:00:00.105)       0:53:00.466 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy magnum registry] *********************\nSunday 01 March 2026  21:58:13 +0000 (0:00:01.200)       0:53:01.667 **********\nchanged: [instance]\n\nTASK [Create magnum registry Ingress] ******************************************\nSunday 01 March 2026  21:58:14 +0000 (0:00:00.850)       0:53:02.517 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  21:58:14 +0000 (0:00:00.149)       0:53:02.667 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  21:58:15 +0000 (0:00:00.078)       0:53:02.746 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  21:58:15 +0000 (0:00:00.071)       0:53:02.818 **********\nok: [instance]\n\nTASK [Create Ingress container-infra-registry] *********************************\nSunday 01 March 2026  21:58:15 +0000 (0:00:00.076)       0:53:02.894 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress container-infra-registry] ***\nSunday 01 March 2026  21:58:15 +0000 (0:00:00.095)       0:53:02.990 **********\nchanged: [instance]\n\nTASK [Upload images] ***********************************************************\nSunday 01 March 2026  21:58:16 +0000 (0:00:01.379)       0:53:04.370 **********\nincluded: glance_image for instance => (item={'name': 'ubuntu-2204-kube-v1.34.3', 'url': 'https://github.com/vexxhost/capo-image-elements/releases/download/2025.12-3/ubuntu-22.04-v1.34.3.qcow2', 'distro': 'ubuntu'})\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  21:58:16 +0000 (0:00:00.183)       
0:53:04.554 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  21:58:17 +0000 (0:00:01.017)       0:53:05.571 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 01 March 2026  21:58:18 +0000 (0:00:00.256)       0:53:05.828 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  21:58:28 +0000 (0:00:10.025)       0:53:15.854 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***\nSunday 01 March 2026  21:58:28 +0000 (0:00:00.293)       0:53:16.147 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  21:58:28 +0000 (0:00:00.074)       0:53:16.221 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  21:58:29 +0000 (0:00:00.779)       0:53:17.000 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  21:59:04 +0000 (0:00:35.316)       0:53:52.317 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  21:59:05 +0000 (0:00:00.485)       0:53:52.802 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  21:59:05 +0000 (0:00:00.102)       0:53:52.904 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  21:59:05 +0000 (0:00:00.129)       0:53:53.033 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] 
******\nSunday 01 March 2026  21:59:05 +0000 (0:00:00.692)       0:53:53.726 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  21:59:06 +0000 (0:00:00.068)       0:53:53.795 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  21:59:06 +0000 (0:00:00.065)       0:53:53.861 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  21:59:06 +0000 (0:00:00.070)       0:53:53.931 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:00:01 +0000 (0:00:54.979)       0:54:48.910 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:00:01 +0000 (0:00:00.688)       0:54:49.598 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:00:01 +0000 (0:00:00.068)       0:54:49.667 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:00:02 +0000 (0:00:00.071)       0:54:49.738 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:00:02 +0000 (0:00:00.080)       0:54:49.819 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:00:02 +0000 (0:00:00.086)       0:54:49.905 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=sharev2)\nok: 
[instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:00:02 +0000 (0:00:00.127)       0:54:50.033 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:00:02 +0000 (0:00:00.106)       0:54:50.139 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:00:02 +0000 (0:00:00.098)       0:54:50.237 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.manila : Create flavor] ******************************\nSunday 01 March 2026  22:00:03 +0000 (0:00:00.568)       0:54:50.805 **********\nchanged: [instance]\n\nTASK [Upload service image] ****************************************************\nSunday 01 March 2026  22:00:06 +0000 (0:00:03.064)       0:54:53.870 **********\nincluded: glance_image for instance\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  22:00:06 +0000 (0:00:00.351)       0:54:54.222 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  22:00:07 +0000 (0:00:00.960)       0:54:55.183 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 01 March 2026  22:00:07 +0000 (0:00:00.248)       0:54:55.431 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  22:00:17 +0000 (0:00:09.464)       0:55:04.896 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***\nSunday 01 March 2026  22:00:17 +0000 (0:00:00.557)       0:55:05.454 
**********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  22:00:17 +0000 (0:00:00.073)       0:55:05.527 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  22:00:18 +0000 (0:00:00.754)       0:55:06.282 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  22:00:35 +0000 (0:00:16.593)       0:55:22.875 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.manila : Create generic share driver security group] ***\nSunday 01 March 2026  22:00:35 +0000 (0:00:00.393)       0:55:23.268 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.manila : Create generic share driver security group tcp rules] ***\nSunday 01 March 2026  22:00:37 +0000 (0:00:01.573)       0:55:24.841 **********\nchanged: [instance] => (item=22)\nchanged: [instance] => (item=111)\nchanged: [instance] => (item=2049)\n\nTASK [vexxhost.atmosphere.manila : Create generic share driver security group icmp rules] ***\nSunday 01 March 2026  22:00:40 +0000 (0:00:03.881)       0:55:28.722 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.manila : Generate temporary file for SSH public key] ***\nSunday 01 March 2026  22:00:42 +0000 (0:00:01.307)       0:55:30.030 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Write contents of current private SSH key] ***\nSunday 01 March 2026  22:00:42 +0000 (0:00:00.207)       0:55:30.238 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Generate public key for SSH private key] ****\nSunday 01 March 2026  22:00:42 +0000 (0:00:00.416)       0:55:30.654 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Delete temporary file for public SSH key] ***\nSunday 01 March 2026  22:00:43 +0000 (0:00:00.356)       0:55:31.010 
**********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Create secret with the SSH keys] ************\nSunday 01 March 2026  22:00:43 +0000 (0:00:00.214)       0:55:31.225 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.manila : Deploy Helm chart] **************************\nSunday 01 March 2026  22:00:44 +0000 (0:00:00.690)       0:55:31.916 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:03:09 +0000 (0:02:25.473)       0:57:57.389 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:03:09 +0000 (0:00:00.172)       0:57:57.562 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:03:09 +0000 (0:00:00.071)       0:57:57.634 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:03:09 +0000 (0:00:00.074)       0:57:57.709 **********\nok: [instance]\n\nTASK [Create Ingress sharev2] **************************************************\nSunday 01 March 2026  22:03:10 +0000 (0:00:00.094)       0:57:57.803 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress sharev2] ********************\nSunday 01 March 2026  22:03:10 +0000 (0:00:00.112)       0:57:57.916 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.manila : Update service tenant quotas] ***************\nSunday 01 March 2026  22:03:11 +0000 (0:00:01.268)       0:57:59.184 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:03:13 +0000 (0:00:02.170)       0:58:01.355 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] 
*************************************************\nSunday 01 March 2026  22:03:13 +0000 (0:00:00.102)       0:58:01.457 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:03:13 +0000 (0:00:00.069)       0:58:01.527 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:03:13 +0000 (0:00:00.070)       0:58:01.597 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:03:13 +0000 (0:00:00.070)       0:58:01.668 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:03:14 +0000 (0:00:00.080)       0:58:01.748 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:03:14 +0000 (0:00:00.077)       0:58:01.825 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:03:14 +0000 (0:00:00.248)       0:58:02.074 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=dashboard)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:03:14 +0000 (0:00:00.113)       0:58:02.187 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:03:14 +0000 (0:00:00.098)       0:58:02.285 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK 
[vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:03:14 +0000 (0:00:00.093)       0:58:02.379 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.horizon : Deploy Helm chart] *************************\nSunday 01 March 2026  22:03:15 +0000 (0:00:00.610)       0:58:02.989 **********\nchanged: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:03:46 +0000 (0:00:31.195)       0:58:34.184 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:03:46 +0000 (0:00:00.163)       0:58:34.348 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:03:46 +0000 (0:00:00.074)       0:58:34.422 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:03:46 +0000 (0:00:00.072)       0:58:34.494 **********\nok: [instance]\n\nTASK [Create Ingress dashboard] ************************************************\nSunday 01 March 2026  22:03:46 +0000 (0:00:00.098)       0:58:34.593 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress dashboard] ******************\nSunday 01 March 2026  22:03:46 +0000 (0:00:00.123)       0:58:34.716 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************\nSunday 01 March 2026  22:03:48 +0000 (0:00:01.214)       0:58:35.930 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Fetch Neutron DB secret] ********\nSunday 01 March 2026  22:03:49 +0000 (0:00:00.863)       0:58:36.794 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Fetch Nova DB secret] ***********\nSunday 01 March 
2026  22:03:49 +0000 (0:00:00.723)       0:58:37.517 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Fetch Octavia DB secret] ********\nSunday 01 March 2026  22:03:50 +0000 (0:00:00.707)       0:58:38.225 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Create \"openstack-database-exporter-dsn\" secret] ***\nSunday 01 March 2026  22:03:51 +0000 (0:00:00.705)       0:58:38.931 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************\nSunday 01 March 2026  22:03:51 +0000 (0:00:00.779)       0:58:39.711 **********\nchanged: [instance]\n\nPLAY [controllers] *************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:03:52 +0000 (0:00:00.811)       0:58:40.523 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********\nSunday 01 March 2026  22:03:57 +0000 (0:00:04.407)       0:58:44.930 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:03:57 +0000 (0:00:00.481)       0:58:45.412 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:03:59 +0000 (0:00:01.341)       0:58:46.754 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:03:59 +0000 (0:00:00.316)       0:58:47.070 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:03:59 +0000 (0:00:00.120)       0:58:47.191 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : 
Extract archive] *****************\nSunday 01 March 2026  22:03:59 +0000 (0:00:00.400)       0:58:47.592 **********\nok: [instance]\n\nTASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************\nSunday 01 March 2026  22:04:01 +0000 (0:00:01.146)       0:58:48.738 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***\nSunday 01 March 2026  22:04:01 +0000 (0:00:00.518)       0:58:49.257 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***\nSunday 01 March 2026  22:04:02 +0000 (0:00:00.780)       0:58:50.038 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***\nSunday 01 March 2026  22:04:03 +0000 (0:00:00.922)       0:58:50.960 **********\nok: [instance]\n\nTASK [Generate OpenStack-Helm endpoints] ***************************************\nSunday 01 March 2026  22:04:03 +0000 (0:00:00.417)       0:58:51.377 **********\nincluded: openstack_helm_endpoints for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:04:03 +0000 (0:00:00.198)       0:58:51.576 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:04:03 +0000 (0:00:00.083)       0:58:51.659 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:04:04 +0000 (0:00:00.072)       0:58:51.732 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:04:04 +0000 (0:00:00.070)       0:58:51.802 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 
2026  22:04:04 +0000 (0:00:00.072)       0:58:51.874 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:04:04 +0000 (0:00:00.069)       0:58:51.944 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:04:04 +0000 (0:00:00.074)       0:58:52.019 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:04:04 +0000 (0:00:00.074)       0:58:52.094 **********\nok: [instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:04:04 +0000 (0:00:00.093)       0:58:52.187 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************\nSunday 01 March 2026  22:04:04 +0000 (0:00:00.111)       0:58:52.299 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********\nSunday 01 March 2026  22:04:05 +0000 (0:00:00.521)       0:58:52.820 **********\nok: [instance]\n\nPLAY [Configure networking] ****************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:04:05 +0000 (0:00:00.581)       0:58:53.402 **********\nok: [instance]\n\nTASK [Add IP address to \"br-ex\"] ***********************************************\nSunday 01 March 2026  22:04:10 +0000 (0:00:04.423)       0:58:57.825 **********\nok: [instance]\n\nTASK [Set \"br-ex\" interface to \"up\"] *******************************************\nSunday 01 March 2026  22:04:10 +0000 (0:00:00.255)       0:58:58.081 **********\nok: [instance]\n\nPLAY RECAP *********************************************************************\ninstance                   : ok=785  
changed=282  unreachable=0    failed=0    skipped=252  rescued=0    ignored=2\n\nSunday 01 March 2026  22:04:10 +0000 (0:00:00.218)       0:58:58.299 **********\n===============================================================================\nvexxhost.atmosphere.cinder : Deploy Helm chart ------------------------ 282.76s\nvexxhost.atmosphere.heat : Deploy Helm chart -------------------------- 184.82s\nvexxhost.atmosphere.keystone : Deploy Helm chart ---------------------- 159.02s\nvexxhost.atmosphere.nova : Deploy Helm chart -------------------------- 154.79s\nvexxhost.atmosphere.keycloak : Deploy Helm chart ---------------------- 153.36s\nvexxhost.atmosphere.manila : Deploy Helm chart ------------------------ 145.47s\nvexxhost.atmosphere.magnum : Deploy Helm chart ------------------------ 127.86s\nvexxhost.atmosphere.glance : Deploy Helm chart ------------------------ 126.72s\nvexxhost.atmosphere.octavia : Deploy Helm chart ----------------------- 125.13s\nvexxhost.atmosphere.neutron : Deploy Helm chart ----------------------- 124.80s\nvexxhost.ceph.mon : Run Bootstrap coomand ----------------------------- 123.51s\nvexxhost.atmosphere.percona_xtradb_cluster : Apply Percona XtraDB cluster -- 91.58s\nvexxhost.atmosphere.barbican : Deploy Helm chart ----------------------- 91.04s\nvexxhost.ceph.osd : Install OSDs --------------------------------------- 87.99s\nvexxhost.atmosphere.placement : Deploy Helm chart ---------------------- 78.41s\nvexxhost.atmosphere.rabbitmq : Deploy cluster -------------------------- 60.98s\nvexxhost.kubernetes.cluster_api : Set node selector for Cluster API components -- 57.90s\nvexxhost.atmosphere.rabbitmq : Deploy cluster -------------------------- 54.98s\nvexxhost.atmosphere.glance_image : Upload image into Glance ------------ 35.32s\nvexxhost.atmosphere.horizon : Deploy Helm chart ------------------------ 31.20s\nINFO     [aio > converge] Executed: Successful\nINFO     [aio > idempotence] Executing\n\nPLAY [all] 
*********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:04:13 +0000 (0:00:00.017)       0:00:00.017 **********\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nSunday 01 March 2026  22:04:18 +0000 (0:00:05.001)       0:00:05.018 **********\nok: [instance]\n\nPLAY [Deploy Ceph monitors & managers] *****************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:04:18 +0000 (0:00:00.230)       0:00:05.249 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:04:22 +0000 (0:00:03.964)       0:00:09.214 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:04:23 +0000 (0:00:00.445)       0:00:09.659 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:04:23 +0000 (0:00:00.189)       0:00:09.848 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:04:23 +0000 (0:00:00.401)       0:00:10.250 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:04:24 +0000 (0:00:00.110)       
0:00:10.360 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:04:24 +0000 (0:00:00.473)       0:00:10.834 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:04:24 +0000 (0:00:00.144)       0:00:10.979 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:04:24 +0000 (0:00:00.146)       0:00:11.126 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:04:25 +0000 (0:00:00.283)       0:00:11.409 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:04:26 +0000 (0:00:01.472)       0:00:12.882 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:04:26 +0000 (0:00:00.137)       0:00:13.019 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:04:27 +0000 (0:00:00.530)       0:00:13.550 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  22:04:29 +0000 (0:00:02.584)       0:00:16.134 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  22:04:29 +0000 (0:00:00.079)       0:00:16.213 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  22:04:29 +0000 (0:00:00.079)       0:00:16.292 **********\nskipping: 
[instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nSunday 01 March 2026  22:04:30 +0000 (0:00:00.085)       0:00:16.378 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nSunday 01 March 2026  22:04:31 +0000 (0:00:01.257)       0:00:17.635 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  22:04:31 +0000 (0:00:00.498)       0:00:18.134 **********\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  22:04:32 +0000 (0:00:00.962)       0:00:19.096 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  22:04:33 +0000 (0:00:00.727)       0:00:19.824 **********\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  22:04:33 +0000 (0:00:00.036)       0:00:19.860 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:04:34 +0000 (0:00:00.725)       0:00:20.586 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:04:34 +0000 (0:00:00.299)       0:00:20.885 **********\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download 
item] *******************\nSunday 01 March 2026  22:04:34 +0000 (0:00:00.114)       0:00:21.000 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:04:35 +0000 (0:00:00.542)       0:00:21.543 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nSunday 01 March 2026  22:04:38 +0000 (0:00:03.268)       0:00:24.811 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nSunday 01 March 2026  22:04:39 +0000 (0:00:01.237)       0:00:26.049 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nSunday 01 March 2026  22:04:40 +0000 (0:00:00.315)       0:00:26.365 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nSunday 01 March 2026  22:04:40 +0000 (0:00:00.490)       0:00:26.855 **********\nok: [instance] => (item={'path': '/etc/docker'})\nok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nSunday 01 March 2026  22:04:41 +0000 (0:00:00.618)       0:00:27.474 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create docker daemon config file] ***********\nSunday 01 March 2026  22:04:41 +0000 (0:00:00.494)       0:00:27.969 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nSunday 01 March 2026  22:04:42 +0000 (0:00:00.470)       0:00:28.440 **********\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nSunday 01 March 2026  22:04:42 +0000 (0:00:00.044)       0:00:28.485 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] 
******\nSunday 01 March 2026  22:04:42 +0000 (0:00:00.460)       0:00:28.946 **********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK [vexxhost.ceph.cephadm : Install packages] ********************************\nSunday 01 March 2026  22:04:42 +0000 (0:00:00.142)       0:00:29.088 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nSunday 01 March 2026  22:04:43 +0000 (0:00:01.234)       0:00:30.323 **********\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nSunday 01 March 2026  22:04:44 +0000 (0:00:00.766)       0:00:31.090 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nSunday 01 March 2026  22:04:45 +0000 (0:00:00.367)       0:00:31.458 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************\nSunday 01 March 2026  22:04:45 +0000 (0:00:00.297)       0:00:31.756 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nSunday 01 March 2026  22:04:45 +0000 (0:00:00.528)       0:00:32.284 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Get `cephadm ls` status] *****************************\nSunday 01 March 2026  22:04:46 +0000 (0:00:00.644)       0:00:32.928 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************\nSunday 01 March 2026  22:04:52 +0000 (0:00:05.700)       0:00:38.629 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********\nSunday 01 March 2026  22:04:52 +0000 (0:00:00.106)       0:00:38.735 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************\nSunday 
01 March 2026  22:04:52 +0000 (0:00:00.086)       0:00:38.821 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************\nSunday 01 March 2026  22:04:52 +0000 (0:00:00.074)       0:00:38.896 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************\nSunday 01 March 2026  22:04:52 +0000 (0:00:00.073)       0:00:38.970 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************\nSunday 01 March 2026  22:04:52 +0000 (0:00:00.083)       0:00:39.053 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************\nSunday 01 March 2026  22:04:52 +0000 (0:00:00.083)       0:00:39.136 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************\nSunday 01 March 2026  22:04:52 +0000 (0:00:00.076)       0:00:39.213 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************\nSunday 01 March 2026  22:04:52 +0000 (0:00:00.081)       0:00:39.294 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************\nSunday 01 March 2026  22:04:53 +0000 (0:00:00.120)       0:00:39.415 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********\nSunday 01 March 2026  22:04:53 +0000 (0:00:00.339)       0:00:39.754 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************\nSunday 01 March 2026  22:04:53 +0000 (0:00:00.114)       0:00:39.868 **********\nskipping: [instance]\n\nTASK [Install Ceph host] *******************************************************\nSunday 01 March 2026  22:04:53 +0000 (0:00:00.083)       0:00:39.952 **********\nincluded: vexxhost.ceph.cephadm_host for 
instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nSunday 01 March 2026  22:04:53 +0000 (0:00:00.148)       0:00:40.100 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nSunday 01 March 2026  22:04:55 +0000 (0:00:02.200)       0:00:42.300 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nSunday 01 March 2026  22:04:56 +0000 (0:00:00.142)       0:00:42.443 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nSunday 01 March 2026  22:04:56 +0000 (0:00:00.522)       0:00:42.965 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************\nSunday 01 March 2026  22:04:59 +0000 (0:00:02.596)       0:00:45.561 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mon : Validate monitor exist] ******************************\nSunday 01 March 2026  22:05:01 +0000 (0:00:02.291)       0:00:47.853 **********\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nSunday 01 March 2026  22:05:12 +0000 (0:00:10.866)       0:00:58.719 **********\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nSunday 01 March 2026  22:05:12 +0000 (0:00:00.148)       0:00:58.868 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nSunday 01 March 2026  22:05:12 +0000 (0:00:00.124)       0:00:58.993 **********\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nSunday 01 March 2026  22:05:12 +0000 (0:00:00.134)       0:00:59.127 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new 
host to Ceph] ***********************\nSunday 01 March 2026  22:05:13 +0000 (0:00:00.303)       0:00:59.430 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Configure \"mgr\" label for managers] ******************\nSunday 01 March 2026  22:05:15 +0000 (0:00:02.882)       0:01:02.313 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Validate manager exist] ******************************\nSunday 01 March 2026  22:05:18 +0000 (0:00:02.202)       0:01:04.515 **********\nok: [instance]\n\nTASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********\nSunday 01 March 2026  22:05:20 +0000 (0:00:02.171)       0:01:06.687 **********\nok: [instance]\n\nPLAY [Deploy Ceph OSDs] ********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:05:23 +0000 (0:00:03.060)       0:01:09.747 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:05:27 +0000 (0:00:04.097)       0:01:13.845 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:05:27 +0000 (0:00:00.299)       0:01:14.144 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:05:27 +0000 (0:00:00.110)       0:01:14.255 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:05:28 +0000 (0:00:00.294)       0:01:14.550 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:05:28 +0000 (0:00:00.115)       0:01:14.666 **********\nok: [instance]\n\nTASK 
[vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:05:28 +0000 (0:00:00.413)       0:01:15.079 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:05:28 +0000 (0:00:00.150)       0:01:15.230 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:05:29 +0000 (0:00:00.298)       0:01:15.528 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:05:29 +0000 (0:00:00.294)       0:01:15.822 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:05:30 +0000 (0:00:01.486)       0:01:17.308 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:05:31 +0000 (0:00:00.152)       0:01:17.460 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:05:31 +0000 (0:00:00.495)       0:01:17.956 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  22:05:33 +0000 (0:00:02.138)       0:01:20.094 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  22:05:33 +0000 (0:00:00.089)       0:01:20.184 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  22:05:33 +0000 (0:00:00.090)       0:01:20.275 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd 
: Install AppArmor packages] **************\nSunday 01 March 2026  22:05:34 +0000 (0:00:00.087)       0:01:20.362 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nSunday 01 March 2026  22:05:35 +0000 (0:00:01.208)       0:01:21.571 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  22:05:35 +0000 (0:00:00.554)       0:01:22.125 **********\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  22:05:36 +0000 (0:00:01.012)       0:01:23.137 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  22:05:37 +0000 (0:00:00.616)       0:01:23.754 **********\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  22:05:37 +0000 (0:00:00.035)       0:01:23.789 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:05:37 +0000 (0:00:00.501)       0:01:24.291 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:05:38 +0000 (0:00:00.289)       0:01:24.580 **********\nok: [instance] => {\n    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  
22:05:38 +0000 (0:00:00.110)       0:01:24.691 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:05:38 +0000 (0:00:00.449)       0:01:25.141 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Install AppArmor packages] ******************\nSunday 01 March 2026  22:05:42 +0000 (0:00:03.355)       0:01:28.496 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************\nSunday 01 March 2026  22:05:43 +0000 (0:00:01.502)       0:01:29.999 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create systemd service file for docker] *****\nSunday 01 March 2026  22:05:43 +0000 (0:00:00.241)       0:01:30.240 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create folders for configuration] ***********\nSunday 01 March 2026  22:05:44 +0000 (0:00:00.509)       0:01:30.749 **********\nok: [instance] => (item={'path': '/etc/docker'})\nok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})\nok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})\n\nTASK [vexxhost.containers.docker : Create systemd socket file for docker] ******\nSunday 01 March 2026  22:05:45 +0000 (0:00:00.658)       0:01:31.408 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Create docker daemon config file] ***********\nSunday 01 March 2026  22:05:45 +0000 (0:00:00.510)       0:01:31.919 **********\nok: [instance]\n\nTASK [vexxhost.containers.docker : Force any restarts if necessary] ************\nSunday 01 March 2026  22:05:46 +0000 (0:00:00.519)       0:01:32.438 **********\n\nTASK [vexxhost.containers.docker : Enable and start service] *******************\nSunday 01 March 2026  22:05:46 +0000 (0:00:00.044)       0:01:32.483 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******\nSunday 01 March 2026  22:05:46 +0000 (0:00:00.489)    
   0:01:32.972 **********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)\n\nTASK [vexxhost.ceph.cephadm : Install packages] ********************************\nSunday 01 March 2026  22:05:46 +0000 (0:00:00.147)       0:01:33.120 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure services are started] *********************\nSunday 01 March 2026  22:05:48 +0000 (0:00:01.253)       0:01:34.373 **********\nok: [instance] => (item=chronyd)\nok: [instance] => (item=sshd)\n\nTASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************\nSunday 01 March 2026  22:05:48 +0000 (0:00:00.800)       0:01:35.174 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************\nSunday 01 March 2026  22:05:49 +0000 (0:00:00.355)       0:01:35.530 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************\nSunday 01 March 2026  22:05:49 +0000 (0:00:00.244)       0:01:35.775 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***\nSunday 01 March 2026  22:05:49 +0000 (0:00:00.295)       0:01:36.070 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get monitor status] **********************************\nSunday 01 March 2026  22:05:50 +0000 (0:00:00.282)       0:01:36.353 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.ceph.osd : Select admin host] ***********************************\nSunday 01 March 2026  22:05:50 +0000 (0:00:00.328)       0:01:36.681 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************\nSunday 01 March 2026  22:05:50 +0000 (0:00:00.093)       0:01:36.775 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************\nSunday 01 March 2026  22:05:56 +0000 (0:00:05.570)       
0:01:42.345 **********\nok: [instance]\n\nTASK [Install Ceph host] *******************************************************\nSunday 01 March 2026  22:05:56 +0000 (0:00:00.114)       0:01:42.460 **********\nincluded: vexxhost.ceph.cephadm_host for instance\n\nTASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******\nSunday 01 March 2026  22:05:56 +0000 (0:00:00.135)       0:01:42.595 **********\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***\nSunday 01 March 2026  22:05:56 +0000 (0:00:00.125)       0:01:42.721 **********\nskipping: [instance] => (item=instance)\nskipping: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********\nSunday 01 March 2026  22:05:56 +0000 (0:00:00.117)       0:01:42.839 **********\nok: [instance]\n\nTASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************\nSunday 01 March 2026  22:05:56 +0000 (0:00:00.330)       0:01:43.169 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************\nSunday 01 March 2026  22:05:59 +0000 (0:00:02.771)       0:01:45.940 **********\nskipping: [instance] => (item=osd.0)\nskipping: [instance] => (item=osd.1)\nskipping: [instance] => (item=osd.2)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************\nSunday 01 March 2026  22:06:09 +0000 (0:00:10.094)       0:01:56.034 **********\nskipping: [instance] => (item=osd.0)\nskipping: [instance] => (item=osd.1)\nskipping: [instance] => (item=osd.2)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************\nSunday 01 March 2026  22:06:09 +0000 (0:00:00.198)       0:01:56.233 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] *******************\nSunday 01 March 2026  22:06:15 +0000 (0:00:05.501)       0:02:01.734 **********\nok: 
[instance]\n\nTASK [vexxhost.ceph.osd : Install OSDs] ****************************************\nSunday 01 March 2026  22:06:26 +0000 (0:00:10.974)       0:02:12.709 **********\nskipping: [instance] => (item=/dev/ceph-instance-osd0/data)\nskipping: [instance] => (item=/dev/ceph-instance-osd1/data)\nskipping: [instance] => (item=/dev/ceph-instance-osd2/data)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Get mon dump] ****************************************\nSunday 01 March 2026  22:06:26 +0000 (0:00:00.209)       0:02:12.919 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Mark require osd release] ****************************\nSunday 01 March 2026  22:06:28 +0000 (0:00:02.177)       0:02:15.097 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************\nSunday 01 March 2026  22:06:30 +0000 (0:00:02.155)       0:02:17.252 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance\n\nTASK [vexxhost.ceph.osd : Set the retry count] *********************************\nSunday 01 March 2026  22:06:31 +0000 (0:00:00.134)       0:02:17.387 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************\nSunday 01 March 2026  22:06:31 +0000 (0:00:00.093)       0:02:17.480 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : OSD daemon list] *************************************\nSunday 01 March 2026  22:06:33 +0000 (0:00:02.147)       0:02:19.628 **********\nok: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************\nSunday 01 March 2026  22:06:33 +0000 (0:00:00.088)       0:02:19.716 **********\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance] => (item=1)\nskipping: [instance]\n\nTASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] ************************\nSunday 01 March 2026  22:06:33 +0000 (0:00:00.105)    
   0:02:19.822 **********\nskipping: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Ensure RBD kernel module is loaded] **************************************\nSunday 01 March 2026  22:06:33 +0000 (0:00:00.115)       0:02:19.937 **********\nok: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:06:34 +0000 (0:00:00.503)       0:02:20.441 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************\nSunday 01 March 2026  22:06:38 +0000 (0:00:04.394)       0:02:24.836 **********\nok: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})\nok: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})\nok: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})\nok: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})\nok: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})\nok: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})\nok: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})\nok: [instance] => (item={'name': 'net.core.rmem_default', 'value': 4194304})\nok: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})\nok: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})\nok: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})\nok: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})\nok: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})\nok: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})\nok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})\nok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})\nok: 
[instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})\nok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})\nok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})\nok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})\n\nTASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***\nSunday 01 March 2026  22:06:42 +0000 (0:00:03.967)       0:02:28.803 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******\nSunday 01 March 2026  22:06:42 +0000 (0:00:00.308)       0:02:29.111 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************\nSunday 01 March 2026  22:06:43 +0000 (0:00:00.499)       0:02:29.611 **********\nok: [instance]\n\nTASK [Set a fact with the \"atmosphere_images\" for other plays] *****************\nSunday 01 March 2026  22:06:43 +0000 (0:00:00.368)       0:02:29.979 **********\nok: [instance]\n\nPLAY [Configure Kubernetes VIP] ************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:06:43 +0000 (0:00:00.099)       0:02:30.078 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***\nSunday 01 March 2026  22:06:47 +0000 (0:00:03.966)       0:02:34.045 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************\nSunday 01 March 2026  22:06:47 +0000 (0:00:00.287)       0:02:34.332 **********\nok: [instance] => (item=/etc/keepalived/keepalived.conf)\nok: [instance] => (item=/etc/keepalived/check_apiserver.sh)\nok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)\nok: [instance] => (item=/etc/haproxy/haproxy.cfg)\nok: [instance] => 
(item=/etc/kubernetes/manifests/haproxy.yaml)\n\nTASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****\nSunday 01 March 2026  22:06:48 +0000 (0:00:00.988)       0:02:35.320 **********\nok: [instance] => (item=/etc/kubernetes/manifests/kube-apiserver.yaml)\nok: [instance] => (item=/etc/kubernetes/controller-manager.conf)\nok: [instance] => (item=/etc/kubernetes/scheduler.conf)\n\nTASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********\nSunday 01 March 2026  22:06:49 +0000 (0:00:00.601)       0:02:35.922 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********\nSunday 01 March 2026  22:06:49 +0000 (0:00:00.232)       0:02:36.155 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************\nSunday 01 March 2026  22:06:50 +0000 (0:00:00.224)       0:02:36.379 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***\nSunday 01 March 2026  22:06:50 +0000 (0:00:00.176)       0:02:36.556 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes manifest] ***************\nSunday 01 March 2026  22:06:50 +0000 (0:00:00.074)       0:02:36.631 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******\nSunday 01 March 2026  22:06:50 +0000 (0:00:00.582)       0:02:37.213 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************\nSunday 01 March 2026  22:06:51 +0000 (0:00:00.256)       0:02:37.470 **********\n\nPLAY [Install Kubernetes] ******************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:06:51 +0000 (0:00:00.114)       0:02:37.584 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget 
package] *********************\nSunday 01 March 2026  22:06:54 +0000 (0:00:03.733)       0:02:41.318 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:06:55 +0000 (0:00:00.290)       0:02:41.608 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:06:55 +0000 (0:00:00.111)       0:02:41.720 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:06:55 +0000 (0:00:00.295)       0:02:42.015 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:06:55 +0000 (0:00:00.103)       0:02:42.119 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:06:56 +0000 (0:00:00.421)       0:02:42.540 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:06:56 +0000 (0:00:00.326)       0:02:42.866 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:06:56 +0000 (0:00:00.318)       0:02:43.184 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:06:58 +0000 (0:00:01.314)       0:02:44.499 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:06:58 +0000 (0:00:00.123)       0:02:44.622 **********\nok: 
[instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:06:58 +0000 (0:00:00.444)       0:02:45.067 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  22:07:00 +0000 (0:00:02.107)       0:02:47.175 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  22:07:00 +0000 (0:00:00.087)       0:02:47.262 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  22:07:01 +0000 (0:00:00.086)       0:02:47.349 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nSunday 01 March 2026  22:07:01 +0000 (0:00:00.086)       0:02:47.436 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nSunday 01 March 2026  22:07:02 +0000 (0:00:01.332)       0:02:48.768 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  22:07:02 +0000 (0:00:00.519)       0:02:49.288 **********\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  22:07:03 +0000 (0:00:00.992)       0:02:50.280 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  22:07:04 +0000 
(0:00:00.601)       0:02:50.882 **********\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  22:07:04 +0000 (0:00:00.041)       0:02:50.923 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***\nSunday 01 March 2026  22:07:05 +0000 (0:00:00.471)       0:02:51.395 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.988)       0:02:52.383 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.084)       0:02:52.468 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.078)       0:02:52.547 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.081)       0:02:52.629 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact if we need to upgrade] ***\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.074)       0:02:52.704 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.119)       0:02:52.823 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.384)       0:02:53.208 **********\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:06 +0000 (0:00:00.100)     
  0:02:53.309 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:07 +0000 (0:00:00.385)       0:02:53.694 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:07:07 +0000 (0:00:00.134)       0:02:53.829 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:07:07 +0000 (0:00:00.284)       0:02:54.114 **********\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:07 +0000 (0:00:00.104)       0:02:54.218 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:08 +0000 (0:00:00.429)       0:02:54.648 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install SELinux packages] ***************\nSunday 01 March 2026  22:07:08 +0000 (0:00:00.178)       0:02:54.826 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***\nSunday 01 March 2026  22:07:08 +0000 (0:00:00.088)       0:02:54.915 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********\nSunday 01 March 2026  22:07:08 +0000 (0:00:00.087)       0:02:55.003 **********\nskipping: [instance]\n\nTASK [vexxhost.containers.containerd : Install AppArmor packages] **************\nSunday 01 March 2026  22:07:08 +0000 (0:00:00.088)       0:02:55.091 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***\nSunday 01 March 2026  22:07:10 +0000 (0:00:01.429)       0:02:56.521 **********\nok: [instance]\n\nTASK 
[vexxhost.containers.containerd : Create folders for configuration] *******\nSunday 01 March 2026  22:07:10 +0000 (0:00:00.528)       0:02:57.050 **********\nok: [instance] => (item={'path': '/etc/containerd'})\nok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})\nok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})\n\nTASK [vexxhost.containers.containerd : Create containerd config file] **********\nSunday 01 March 2026  22:07:11 +0000 (0:00:01.009)       0:02:58.059 **********\nok: [instance]\n\nTASK [vexxhost.containers.containerd : Force any restarts if necessary] ********\nSunday 01 March 2026  22:07:12 +0000 (0:00:00.580)       0:02:58.640 **********\n\nTASK [vexxhost.containers.containerd : Enable and start service] ***************\nSunday 01 March 2026  22:07:12 +0000 (0:00:00.037)       0:02:58.677 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:07:12 +0000 (0:00:00.449)       0:02:59.127 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:07:13 +0000 (0:00:00.451)       0:02:59.578 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:13 +0000 (0:00:00.120)       0:02:59.699 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:13 +0000 (0:00:00.430)       0:03:00.129 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : 
Starting download of file] *******\nSunday 01 March 2026  22:07:15 +0000 (0:00:01.245)       0:03:01.374 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:15 +0000 (0:00:00.127)       0:03:01.501 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:15 +0000 (0:00:00.414)       0:03:01.915 **********\nok: [instance]\n\nTASK [vexxhost.containers.cri_tools : Create crictl config] ********************\nSunday 01 March 2026  22:07:16 +0000 (0:00:01.192)       0:03:03.108 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********\nSunday 01 March 2026  22:07:17 +0000 (0:00:00.538)       0:03:03.646 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:07:17 +0000 (0:00:00.289)       0:03:03.936 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:07:17 +0000 (0:00:00.291)       0:03:04.227 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:18 +0000 (0:00:00.115)       0:03:04.343 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:18 +0000 (0:00:00.423)       0:03:04.767 **********\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***\nSunday 01 March 2026  22:07:20 +0000 (0:00:02.122)       
0:03:06.890 **********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)\n\nTASK [vexxhost.containers.cni_plugins : Install additional packages] ***********\nSunday 01 March 2026  22:07:20 +0000 (0:00:00.129)       0:03:07.019 **********\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************\nSunday 01 March 2026  22:07:21 +0000 (0:00:01.208)       0:03:08.228 **********\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********\nSunday 01 March 2026  22:07:22 +0000 (0:00:00.261)       0:03:08.489 **********\nok: [instance]\n\nTASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******\nSunday 01 March 2026  22:07:22 +0000 (0:00:00.497)       0:03:08.987 **********\nok: [instance] => (item=br_netfilter)\nok: [instance] => (item=ip_tables)\nok: [instance] => (item=ip6_tables)\nok: [instance] => (item=nf_conntrack)\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:07:23 +0000 (0:00:00.829)       0:03:09.817 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:07:23 +0000 (0:00:00.301)       0:03:10.118 **********\nok: [instance] => {\n    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:23 +0000 (0:00:00.110)       0:03:10.228 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:24 +0000 (0:00:00.460)       0:03:10.688 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***\nSunday 01 March 2026  22:07:24 +0000 (0:00:00.281)       0:03:10.970 
**********\nok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)\n\nTASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************\nSunday 01 March 2026  22:07:24 +0000 (0:00:00.140)       0:03:11.111 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************\nSunday 01 March 2026  22:07:24 +0000 (0:00:00.094)       0:03:11.205 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************\nSunday 01 March 2026  22:07:26 +0000 (0:00:01.233)       0:03:12.439 **********\nok: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})\nok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})\nok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})\nok: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})\nok: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})\nok: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})\nok: [instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})\n\nTASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***\nSunday 01 March 2026  22:07:27 +0000 (0:00:01.359)       0:03:13.798 **********\nok: [instance] => (item=/etc/systemd/system/kubelet.service.d)\nok: [instance] => (item=/etc/kubernetes)\nok: [instance] => (item=/etc/kubernetes/manifests)\n\nTASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********\nSunday 01 March 2026  22:07:28 +0000 (0:00:00.629)       0:03:14.427 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***\nSunday 01 March 2026  22:07:28 +0000 (0:00:00.478)       0:03:14.906 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Check swap 
status] *************************\nSunday 01 March 2026  22:07:29 +0000 (0:00:00.491)       0:03:15.397 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************\nSunday 01 March 2026  22:07:29 +0000 (0:00:00.269)       0:03:15.667 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********\nSunday 01 March 2026  22:07:29 +0000 (0:00:00.075)       0:03:15.742 **********\nok: [instance] => (item=swap)\nok: [instance] => (item=none)\n\nTASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***\nSunday 01 March 2026  22:07:29 +0000 (0:00:00.543)       0:03:16.286 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************\nSunday 01 March 2026  22:07:30 +0000 (0:00:00.493)       0:03:16.779 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********\nSunday 01 March 2026  22:07:30 +0000 (0:00:00.445)       0:03:17.225 **********\n\nTASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********\nSunday 01 March 2026  22:07:30 +0000 (0:00:00.034)       0:03:17.259 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****\nSunday 01 March 2026  22:07:31 +0000 (0:00:00.405)       0:03:17.665 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************\nSunday 01 March 2026  22:07:32 +0000 (0:00:01.165)       0:03:18.830 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********\nSunday 01 March 2026  22:07:35 +0000 (0:00:02.561)       0:03:21.392 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********\nSunday 01 March 2026  22:07:35 +0000 (0:00:00.267)       0:03:21.659 **********\nok: [instance]\n\nTASK 
[vexxhost.kubernetes.kubernetes : Setup control plane] ********************\nSunday 01 March 2026  22:07:35 +0000 (0:00:00.607)       0:03:22.267 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************\nSunday 01 March 2026  22:07:36 +0000 (0:00:00.144)       0:03:22.412 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance\n\nTASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***\nSunday 01 March 2026  22:07:36 +0000 (0:00:00.142)       0:03:22.555 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****\nSunday 01 March 2026  22:07:36 +0000 (0:00:00.296)       0:03:22.852 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***\nSunday 01 March 2026  22:07:36 +0000 (0:00:00.079)       0:03:22.931 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********\nSunday 01 March 2026  22:07:36 +0000 (0:00:00.075)       0:03:23.007 **********\nok: [instance] => {\n    \"msg\": \"instance\"\n}\n\nTASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***\nSunday 01 March 2026  22:07:36 +0000 (0:00:00.071)       0:03:23.079 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************\nSunday 01 March 2026  22:07:37 +0000 (0:00:00.710)       0:03:23.789 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************\nSunday 01 March 2026  22:07:37 +0000 (0:00:00.278)       0:03:24.068 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes 
: Create kubernetes ca cert] **************\nSunday 01 March 2026  22:07:37 +0000 (0:00:00.073)       0:03:24.141 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************\nSunday 01 March 2026  22:07:37 +0000 (0:00:00.070)       0:03:24.212 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************\nSunday 01 March 2026  22:07:37 +0000 (0:00:00.071)       0:03:24.284 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************\nSunday 01 March 2026  22:07:38 +0000 (0:00:00.074)       0:03:24.359 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************\nSunday 01 March 2026  22:07:38 +0000 (0:00:00.066)       0:03:24.425 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Initialize cluster] *********************\nSunday 01 March 2026  22:07:38 +0000 (0:00:00.057)       0:03:24.483 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***\nSunday 01 March 2026  22:07:38 +0000 (0:00:00.227)       0:03:24.710 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************\nSunday 01 March 2026  22:07:38 +0000 (0:00:00.246)       0:03:24.957 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***\nSunday 01 March 2026  22:07:38 +0000 (0:00:00.068)       0:03:25.025 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***\nSunday 01 March 2026  22:07:38 +0000 (0:00:00.242)       0:03:25.267 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******\nSunday 01 March 2026  22:07:39 +0000 (0:00:00.238)       0:03:25.506 **********\nok: 
[instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************\nSunday 01 March 2026  22:07:39 +0000 (0:00:00.236)       0:03:25.743 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***\nSunday 01 March 2026  22:07:40 +0000 (0:00:01.138)       0:03:26.882 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************\nSunday 01 March 2026  22:07:41 +0000 (0:00:01.214)       0:03:28.097 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***\nSunday 01 March 2026  22:07:41 +0000 (0:00:00.090)       0:03:28.187 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***\nSunday 01 March 2026  22:07:41 +0000 (0:00:00.082)       0:03:28.269 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************\nSunday 01 March 2026  22:07:42 +0000 (0:00:00.804)       0:03:29.073 **********\nskipping: [instance] => (item=DaemonSet)\nskipping: [instance] => (item=ConfigMap)\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************\nSunday 01 March 2026  22:07:42 +0000 (0:00:00.074)       0:03:29.148 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***\nSunday 01 March 2026  22:07:42 +0000 (0:00:00.082)       0:03:29.230 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of the Kubernetes API services] ***\nSunday 01 March 2026  22:07:42 +0000 (0:00:00.083)       0:03:29.314 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************\nSunday 01 March 2026  22:07:43 +0000 (0:00:00.076)       
0:03:29.390 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***\nSunday 01 March 2026  22:07:43 +0000 (0:00:00.064)       0:03:29.454 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***\nSunday 01 March 2026  22:07:43 +0000 (0:00:00.541)       0:03:29.995 **********\nok: [instance]\n\nPLAY [Install control-plane components] ****************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:07:44 +0000 (0:00:01.125)       0:03:31.121 **********\nok: [instance]\n\nTASK [vexxhost.containers.forget_package : Forget package] *********************\nSunday 01 March 2026  22:07:49 +0000 (0:00:05.160)       0:03:36.282 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:07:50 +0000 (0:00:00.286)       0:03:36.568 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:07:51 +0000 (0:00:01.222)       0:03:37.791 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:07:51 +0000 (0:00:00.277)       0:03:38.068 **********\nok: [instance] => {\n    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:51 +0000 (0:00:00.109)       0:03:38.177 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:52 +0000 (0:00:00.389)       0:03:38.567 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************\nSunday 01 March 2026  22:07:53 +0000 (0:00:01.321)       
0:03:39.889 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************\nSunday 01 March 2026  22:07:53 +0000 (0:00:00.388)       0:03:40.277 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************\nSunday 01 March 2026  22:07:54 +0000 (0:00:00.249)       0:03:40.527 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***\nSunday 01 March 2026  22:07:54 +0000 (0:00:00.313)       0:03:40.840 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***\nSunday 01 March 2026  22:07:54 +0000 (0:00:00.250)       0:03:41.091 **********\nok: [instance]\n\nTASK [Install plugin] **********************************************************\nSunday 01 March 2026  22:07:54 +0000 (0:00:00.235)       0:03:41.327 **********\nincluded: vexxhost.containers.download_artifact for instance\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:07:55 +0000 (0:00:00.116)       0:03:41.443 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:07:55 +0000 (0:00:00.215)       0:03:41.658 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:07:55 +0000 (0:00:00.385)       0:03:42.044 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:07:57 +0000 (0:00:01.459)       0:03:43.504 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for 
instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:07:57 +0000 (0:00:00.104)       0:03:43.609 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************\nSunday 01 March 2026  22:07:57 +0000 (0:00:00.687)       0:03:44.296 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************\nSunday 01 March 2026  22:07:58 +0000 (0:00:00.999)       0:03:45.295 **********\nok: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:08:01 +0000 (0:00:02.091)       0:03:47.387 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********\nSunday 01 March 2026  22:08:05 +0000 (0:00:04.219)       0:03:51.606 **********\nok: [instance]\n\nPLAY [all] *********************************************************************\n\nTASK [Uninstall unattended-upgrades] *******************************************\nSunday 01 March 2026  22:08:06 +0000 (0:00:00.818)       0:03:52.425 **********\nok: [instance]\n\nPLAY [controllers] *************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:08:07 +0000 (0:00:00.940)       0:03:53.366 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:12 +0000 (0:00:05.120)       0:03:58.486 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:08:12 +0000 (0:00:00.111)       0:03:58.598 
**********\nok: [instance]\n\nTASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********\nSunday 01 March 2026  22:08:12 +0000 (0:00:00.576)       0:03:59.174 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:14 +0000 (0:00:01.367)       0:04:00.542 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***\nSunday 01 March 2026  22:08:14 +0000 (0:00:00.097)       0:04:00.639 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***\nSunday 01 March 2026  22:08:14 +0000 (0:00:00.113)       0:04:00.753 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************\nSunday 01 March 2026  22:08:14 +0000 (0:00:00.070)       0:04:00.823 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************\nSunday 01 March 2026  22:08:14 +0000 (0:00:00.115)       0:04:00.939 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****\nSunday 01 March 2026  22:08:14 +0000 (0:00:00.141)       0:04:01.081 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************\nSunday 01 March 2026  22:08:14 +0000 (0:00:00.126)       0:04:01.208 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.227)       0:04:01.435 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.141)       0:04:01.577 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create Secret] 
**********************\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.065)       0:04:01.642 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.073)       0:04:01.715 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.074)       0:04:01.790 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.149)       0:04:01.939 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.086)       0:04:02.025 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.067)       0:04:02.092 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.100)       0:04:02.193 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.061)       0:04:02.254 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************\nSunday 01 March 2026  22:08:15 +0000 (0:00:00.082)       0:04:02.337 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create namespace] *************************\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.064)       0:04:02.402 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Install Portworx] *************************\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.059)       0:04:02.462 **********\nskipping: 
[instance]\n\nTASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.069)       0:04:02.531 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.063)       0:04:02.594 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.151)       0:04:02.746 **********\nskipping: [instance] => (item={'name': 'controllerplugin'})\nskipping: [instance] => (item={'name': 'nodeplugin'})\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.089)       0:04:02.835 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.065)       0:04:02.900 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.147)       0:04:03.048 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.212)       0:04:03.261 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********\nSunday 01 March 2026  22:08:16 +0000 (0:00:00.069)       0:04:03.331 **********\nskipping: [instance]\n\nPLAY [Deploy Infrastructure] ***************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:08:17 +0000 (0:00:00.159)       0:04:03.491 **********\nok: [instance]\n\nTASK [Deploy Helm chart] *******************************************************\nSunday 01 March 2026  
22:08:20 +0000 (0:00:03.807)       0:04:07.298 **********\nincluded: vexxhost.kubernetes.cert_manager for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:21 +0000 (0:00:00.116)       0:04:07.414 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:08:21 +0000 (0:00:00.107)       0:04:07.522 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cert_manager : Deploy Helm chart] ********************\nSunday 01 March 2026  22:08:21 +0000 (0:00:00.593)       0:04:08.116 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Create self-signed cluster issuer] ***\nSunday 01 March 2026  22:08:24 +0000 (0:00:02.271)       0:04:10.387 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Import tasks for ClusterIssuer type] ***\nSunday 01 March 2026  22:08:25 +0000 (0:00:01.035)       0:04:11.422 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/atmosphere/roles/cluster_issuer/tasks/type/self-signed/main.yml for instance\n\nTASK [vexxhost.atmosphere.cluster_issuer : Create ClusterIssuer] ***************\nSunday 01 March 2026  22:08:25 +0000 (0:00:00.107)       0:04:11.529 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Wait till the secret is created] ****\nSunday 01 March 2026  22:08:25 +0000 (0:00:00.760)       0:04:12.290 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Copy CA certificate on host] ********\nSunday 01 March 2026  22:08:26 +0000 (0:00:00.687)       0:04:12.978 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cluster_issuer : Flush all handlers] *****************\nSunday 01 March 2026  22:08:27 +0000 (0:00:00.508)       0:04:13.486 
**********\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:27 +0000 (0:00:00.107)       0:04:13.594 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:08:27 +0000 (0:00:00.124)       0:04:13.718 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ingress_nginx : Deploy Helm chart] *******************\nSunday 01 March 2026  22:08:27 +0000 (0:00:00.585)       0:04:14.303 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:29 +0000 (0:00:01.721)       0:04:16.025 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:08:29 +0000 (0:00:00.115)       0:04:16.140 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Install all CRDs] ********\nSunday 01 March 2026  22:08:30 +0000 (0:00:00.611)       0:04:16.752 **********\nok: [instance] => (item=messaging-topology-operator)\nok: [instance] => (item=rabbitmq-cluster)\n\nTASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Deploy Helm chart] *******\nSunday 01 March 2026  22:08:33 +0000 (0:00:03.361)       0:04:20.113 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:36 +0000 (0:00:02.452)       0:04:22.566 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : 
Upload Helm chart] ***************\nSunday 01 March 2026  22:08:36 +0000 (0:00:00.117)       0:04:22.683 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Install all CRDs] ***\nSunday 01 March 2026  22:08:36 +0000 (0:00:00.573)       0:04:23.256 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Deploy Helm chart] ***\nSunday 01 March 2026  22:08:40 +0000 (0:00:03.430)       0:04:26.687 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Check if the Percona XtraDB cluster secret exists] ***\nSunday 01 March 2026  22:08:42 +0000 (0:00:01.688)       0:04:28.375 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Create a secret] ************\nSunday 01 March 2026  22:08:42 +0000 (0:00:00.736)       0:04:29.111 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Check if the Percona XtraDB cluster exists] ***\nSunday 01 March 2026  22:08:42 +0000 (0:00:00.072)       0:04:29.184 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Get current status of the cluster] ***\nSunday 01 March 2026  22:08:43 +0000 (0:00:00.780)       0:04:29.964 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Assert that the cluster is healthy before upgrade] ***\nSunday 01 March 2026  22:08:43 +0000 (0:00:00.087)       0:04:30.052 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Stop PXC-operator] **********\nSunday 01 March 2026  22:08:43 +0000 (0:00:00.082)       0:04:30.135 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Change the cluster Statefulset image to 8.0] ***\nSunday 01 March 2026  22:08:43 +0000 (0:00:00.079)       0:04:30.214 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Wait until the cluster Statefulset rollout] ***\nSunday 01 
March 2026  22:08:43 +0000 (0:00:00.081)       0:04:30.296 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Update pxc cluster spec] ****\nSunday 01 March 2026  22:08:44 +0000 (0:00:00.078)       0:04:30.374 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Start PXC-operator] *********\nSunday 01 March 2026  22:08:44 +0000 (0:00:00.082)       0:04:30.457 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Apply Percona XtraDB cluster] ***\nSunday 01 March 2026  22:08:44 +0000 (0:00:00.082)       0:04:30.539 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.percona_xtradb_cluster : Create percona haproxy metric service] ***\nSunday 01 March 2026  22:08:45 +0000 (0:00:00.894)       0:04:31.433 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:45 +0000 (0:00:00.838)       0:04:32.271 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:08:46 +0000 (0:00:00.108)       0:04:32.380 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.valkey : Create TLS resources] ***********************\nSunday 01 March 2026  22:08:46 +0000 (0:00:00.579)       0:04:32.960 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.valkey : Deploy Helm chart] **************************\nSunday 01 March 2026  22:08:47 +0000 (0:00:00.805)       0:04:33.766 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:08:49 +0000 (0:00:01.961)       0:04:35.727 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] 
*************************************************\nSunday 01 March 2026  22:08:49 +0000 (0:00:00.076)       0:04:35.804 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:08:49 +0000 (0:00:00.075)       0:04:35.879 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:08:49 +0000 (0:00:00.223)       0:04:36.102 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:08:49 +0000 (0:00:00.067)       0:04:36.170 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:08:50 +0000 (0:00:00.708)       0:04:36.878 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:08:50 +0000 (0:00:00.084)       0:04:36.963 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:08:50 +0000 (0:00:00.074)       0:04:37.037 **********\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:08:50 +0000 (0:00:00.087)       0:04:37.125 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:08:50 +0000 (0:00:00.113)       0:04:37.239 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:08:51 +0000 (0:00:00.105)       
0:04:37.344 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Get the Kuberentes service for Percona XtraDB Cluster] ***\nSunday 01 March 2026  22:08:51 +0000 (0:00:00.594)       0:04:37.939 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Install MySQL python package] *************\nSunday 01 March 2026  22:08:52 +0000 (0:00:00.752)       0:04:38.691 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Check MySQL ready] ************************\nSunday 01 March 2026  22:08:53 +0000 (0:00:01.002)       0:04:39.694 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Create Keycloak database] *****************\nSunday 01 March 2026  22:08:53 +0000 (0:00:00.513)       0:04:40.207 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Create a Keycloak user] *******************\nSunday 01 March 2026  22:08:54 +0000 (0:00:00.539)       0:04:40.747 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Disable pxc strict mode] ******************\nSunday 01 March 2026  22:08:54 +0000 (0:00:00.523)       0:04:41.270 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Deploy Helm chart] ************************\nSunday 01 March 2026  22:08:55 +0000 (0:00:00.488)       0:04:41.759 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Wait until keycloak ready] ****************\nSunday 01 March 2026  22:08:57 +0000 (0:00:01.642)       0:04:43.402 **********\nok: [instance]\n\nTASK [Create Keycloak Ingress] *************************************************\nSunday 01 March 2026  22:08:57 +0000 (0:00:00.726)       0:04:44.128 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress keycloak] *******************\nSunday 01 March 2026  22:08:57 +0000 (0:00:00.112)       0:04:44.241 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keycloak : Enable pxc strict mode] *******************\nSunday 01 March 2026  22:08:59 
+0000 (0:00:01.189)       0:04:45.430 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keepalived : Deploy service] *************************\nSunday 01 March 2026  22:08:59 +0000 (0:00:00.359)       0:04:45.790 **********\nok: [instance]\n\nPLAY [Deploy Monitoring] *******************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:09:00 +0000 (0:00:00.888)       0:04:46.678 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:09:04 +0000 (0:00:04.314)       0:04:50.992 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:09:04 +0000 (0:00:00.113)       0:04:51.106 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.node_feature_discovery : Install all CRDs] ***********\nSunday 01 March 2026  22:09:05 +0000 (0:00:00.594)       0:04:51.700 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.node_feature_discovery : Deploy Helm chart] **********\nSunday 01 March 2026  22:09:06 +0000 (0:00:00.868)       0:04:52.569 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:09:07 +0000 (0:00:01.605)       0:04:54.175 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:09:07 +0000 (0:00:00.106)       0:04:54.282 **********\nok: [instance]\n\nTASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****\nSunday 01 March 2026  22:09:08 +0000 
(0:00:00.643)       0:04:54.925 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Wait until Keycloak service is ready] ***\nSunday 01 March 2026  22:09:09 +0000 (0:00:01.346)       0:04:56.272 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak realm] *******\nSunday 01 March 2026  22:09:10 +0000 (0:00:00.725)       0:04:56.997 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Add client roles in \"id_token\"] ***\nSunday 01 March 2026  22:09:12 +0000 (0:00:01.457)       0:04:58.455 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" CA certificate] ***\nSunday 01 March 2026  22:09:13 +0000 (0:00:01.284)       0:04:59.740 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client certificate] ***\nSunday 01 March 2026  22:09:13 +0000 (0:00:00.476)       0:05:00.216 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client key] ***\nSunday 01 March 2026  22:09:14 +0000 (0:00:00.242)       0:05:00.458 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create secrets for monitoring] ***\nSunday 01 March 2026  22:09:14 +0000 (0:00:00.230)       0:05:00.689 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Generate client secret passwords] ***\nSunday 01 March 2026  22:09:15 +0000 (0:00:00.724)       0:05:01.414 **********\nok: [instance] => (item=alertmanager)\nok: [instance] => (item=grafana)\nok: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Collect all client secrets] ***\nSunday 01 March 2026  22:09:17 +0000 (0:00:02.144)       0:05:03.558 **********\nok: [instance] => (item=alertmanager)\nok: [instance] => (item=grafana)\nok: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak 
clients] *****\nSunday 01 March 2026  22:09:19 +0000 (0:00:01.988)       0:05:05.547 **********\nok: [instance] => (item=None)\nok: [instance] => (item=None)\nok: [instance] => (item=None)\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak roles] *******\nSunday 01 March 2026  22:09:22 +0000 (0:00:03.413)       0:05:08.960 **********\nok: [instance] => (item=None)\nok: [instance] => (item=None)\nok: [instance] => (item=None)\nok: [instance] => (item=None)\nok: [instance] => (item=None)\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Generate cookie secrets] *****\nSunday 01 March 2026  22:09:27 +0000 (0:00:04.625)       0:05:13.586 **********\nok: [instance] => (item=alertmanager)\nok: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Generate OAuth2 proxy configuration] ***\nSunday 01 March 2026  22:09:28 +0000 (0:00:01.443)       0:05:15.029 **********\nok: [instance] => (item=alertmanager)\nok: [instance] => (item=prometheus)\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Create certificate issuer] ***\nSunday 01 March 2026  22:09:30 +0000 (0:00:01.487)       0:05:16.517 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Install all CRDs] ************\nSunday 01 March 2026  22:09:30 +0000 (0:00:00.725)       0:05:17.242 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy additional dashboards] ***\nSunday 01 March 2026  22:09:39 +0000 (0:00:08.398)       0:05:25.641 **********\nok: [instance] => (item={'name': 'haproxy', 'state': 'present'})\nok: [instance] => (item={'name': 'goldpinger', 'state': 'present'})\nok: [instance] => (item={'name': 'node-exporter-full', 'state': 'present'})\nok: [instance] => (item={'name': 'ceph-cluster', 'state': 'present'})\nok: [instance] => (item={'name': 'ceph-cluster-advanced', 'state': 'present'})\nok: [instance] => (item={'name': 'hosts-overview', 'state': 
'present'})\nok: [instance] => (item={'name': 'host-details', 'state': 'present'})\nok: [instance] => (item={'name': 'pool-overview', 'state': 'present'})\nok: [instance] => (item={'name': 'pool-detail', 'state': 'present'})\nok: [instance] => (item={'name': 'osds-overview', 'state': 'present'})\nok: [instance] => (item={'name': 'osd-device-details', 'state': 'present'})\nok: [instance] => (item={'name': 'rbd-overview', 'state': 'present'})\nok: [instance] => (item={'name': 'rbd-details', 'state': 'present'})\n\nTASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy Helm chart] ***********\nSunday 01 March 2026  22:09:54 +0000 (0:00:15.570)       0:05:41.211 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:10:10 +0000 (0:00:15.437)       0:05:56.649 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:10:10 +0000 (0:00:00.125)       0:05:56.774 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.loki : Deploy Helm chart] ****************************\nSunday 01 March 2026  22:10:11 +0000 (0:00:00.653)       0:05:57.427 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:10:14 +0000 (0:00:03.197)       0:06:00.624 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:10:14 +0000 (0:00:00.121)       0:06:00.746 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.vector : Deploy Helm chart] **************************\nSunday 01 March 2026  22:10:15 +0000 
(0:00:00.624)       0:06:01.371 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:10:16 +0000 (0:00:01.624)       0:06:02.995 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:10:16 +0000 (0:00:00.121)       0:06:03.117 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.goldpinger : Deploy Helm chart] **********************\nSunday 01 March 2026  22:10:17 +0000 (0:00:00.602)       0:06:03.719 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ipmi_exporter : Deploy service] **********************\nSunday 01 March 2026  22:10:18 +0000 (0:00:01.592)       0:06:05.312 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:10:19 +0000 (0:00:00.902)       0:06:06.214 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:10:19 +0000 (0:00:00.124)       0:06:06.339 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.prometheus_pushgateway : Deploy Helm chart] **********\nSunday 01 March 2026  22:10:20 +0000 (0:00:00.593)       0:06:06.933 **********\nok: [instance]\n\nPLAY [Deploy OpenStack] ********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:10:22 +0000 (0:00:01.774)       0:06:08.707 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:10:26 
+0000 (0:00:04.305)       0:06:13.012 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:10:26 +0000 (0:00:00.119)       0:06:13.132 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:10:26 +0000 (0:00:00.082)       0:06:13.214 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:10:26 +0000 (0:00:00.088)       0:06:13.303 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:10:27 +0000 (0:00:00.081)       0:06:13.384 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:10:27 +0000 (0:00:00.078)       0:06:13.463 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:10:27 +0000 (0:00:00.079)       0:06:13.543 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:10:27 +0000 (0:00:00.086)       0:06:13.630 **********\nok: [instance] => (item=oslo_cache)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:10:27 +0000 (0:00:00.096)       0:06:13.726 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:10:27 +0000 (0:00:00.130)       0:06:13.857 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK 
[vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:10:27 +0000 (0:00:00.121)       0:06:13.978 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.memcached : Deploy Helm chart] ***********************\nSunday 01 March 2026  22:10:28 +0000 (0:00:00.631)       0:06:14.609 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.memcached : Apply manifests for monitoring] **********\nSunday 01 March 2026  22:10:30 +0000 (0:00:01.880)       0:06:16.490 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************\nSunday 01 March 2026  22:10:30 +0000 (0:00:00.790)       0:06:17.281 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****\nSunday 01 March 2026  22:10:31 +0000 (0:00:01.039)       0:06:18.320 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********\nSunday 01 March 2026  22:10:32 +0000 (0:00:00.294)       0:06:18.615 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:10:32 +0000 (0:00:00.619)       0:06:19.234 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:10:33 +0000 (0:00:00.123)       0:06:19.358 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:10:33 +0000 (0:00:00.130)       0:06:19.489 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:10:33 +0000 (0:00:00.775)       0:06:20.265 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:10:34 +0000 (0:00:00.186)       
0:06:20.451 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:10:34 +0000 (0:00:00.195)       0:06:20.647 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:10:34 +0000 (0:00:00.188)       0:06:20.835 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:10:35 +0000 (0:00:00.936)       0:06:21.772 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.718)       0:06:22.490 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.095)       0:06:22.586 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.079)       0:06:22.665 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.088)       0:06:22.754 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.077)       0:06:22.831 **********\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.121)       0:06:22.952 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] 
***\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.108)       0:06:23.061 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:10:36 +0000 (0:00:00.110)       0:06:23.172 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Create Keycloak realms] *******************\nSunday 01 March 2026  22:10:37 +0000 (0:00:00.629)       0:06:23.802 **********\nok: [instance] => (item=None)\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Setup Keycloak Authentication Required Actions (MFA)] ***\nSunday 01 March 2026  22:10:38 +0000 (0:00:01.285)       0:06:25.087 **********\nok: [instance] => (item=atmosphere)\n\nTASK [vexxhost.atmosphere.keystone : Create ConfigMap with all OpenID connect configurations] ***\nSunday 01 March 2026  22:10:40 +0000 (0:00:01.303)       0:06:26.391 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Create Keycloak clients] ******************\nSunday 01 March 2026  22:10:41 +0000 (0:00:00.995)       0:06:27.386 **********\nok: [instance] => (item=None)\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Assign realm-management roles to service account] ***\nSunday 01 March 2026  22:10:42 +0000 (0:00:00.993)       0:06:28.380 **********\nok: [instance] => (item=None)\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Deploy Helm chart] ************************\nSunday 01 March 2026  22:10:43 +0000 (0:00:01.260)       0:06:29.641 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:10:46 +0000 (0:00:03.240)       0:06:32.881 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:10:46 +0000 (0:00:00.188)    
   0:06:33.070 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:10:46 +0000 (0:00:00.086)       0:06:33.156 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:10:46 +0000 (0:00:00.077)       0:06:33.234 **********\nok: [instance]\n\nTASK [Create Ingress identity] *************************************************\nSunday 01 March 2026  22:10:46 +0000 (0:00:00.098)       0:06:33.333 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress identity] *******************\nSunday 01 March 2026  22:10:47 +0000 (0:00:00.140)       0:06:33.473 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Validate if ingress is reachable] *********\nSunday 01 March 2026  22:10:48 +0000 (0:00:01.139)       0:06:34.613 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Wait until identity service ready] ********\nSunday 01 March 2026  22:10:48 +0000 (0:00:00.386)       0:06:34.999 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.keystone : Create Keystone domains] ******************\nSunday 01 March 2026  22:10:49 +0000 (0:00:00.810)       0:06:35.809 **********\nok: [instance] => (item=atmosphere)\n\nTASK [vexxhost.atmosphere.keystone : Create Keystone identity providers] *******\nSunday 01 March 2026  22:10:50 +0000 (0:00:01.236)       0:06:37.046 **********\nok: [instance] => (item=atmosphere)\n\nTASK [vexxhost.atmosphere.keystone : Create Keystone federation mappings] ******\nSunday 01 March 2026  22:10:51 +0000 (0:00:01.255)       0:06:38.301 **********\nok: [instance] => (item=atmosphere)\n\nTASK [vexxhost.atmosphere.keystone : Create Keystone federation protocols] *****\nSunday 01 March 2026  22:10:53 +0000 (0:00:01.171)       0:06:39.473 **********\nok: [instance] => (item=atmosphere)\n\nTASK 
[vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********\nSunday 01 March 2026  22:10:54 +0000 (0:00:01.330)       0:06:40.803 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:10:54 +0000 (0:00:00.331)       0:06:41.134 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:10:56 +0000 (0:00:01.736)       0:06:42.871 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:10:56 +0000 (0:00:00.282)       0:06:43.154 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:10:56 +0000 (0:00:00.126)       0:06:43.281 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:10:57 +0000 (0:00:00.407)       0:06:43.689 **********\nok: [instance]\n\nTASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************\nSunday 01 March 2026  22:10:58 +0000 (0:00:01.110)       0:06:44.800 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***\nSunday 01 March 2026  22:10:59 +0000 (0:00:00.584)       0:06:45.384 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***\nSunday 01 March 2026  22:10:59 +0000 (0:00:00.907)       0:06:46.291 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***\nSunday 01 March 2026  22:11:00 +0000 (0:00:00.824)       0:06:47.115 **********\nok: [instance]\n\nTASK [Generate OpenStack-Helm endpoints] 
***************************************\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.423)       0:06:47.539 **********\nincluded: openstack_helm_endpoints for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.195)       0:06:47.735 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.078)       0:06:47.813 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.071)       0:06:47.885 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.073)       0:06:47.958 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.072)       0:06:48.030 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.081)       0:06:48.112 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.075)       0:06:48.187 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:11:01 +0000 (0:00:00.089)       0:06:48.277 **********\nok: [instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:11:02 +0000 (0:00:00.096)       0:06:48.374 **********\nok: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************\nSunday 01 March 2026  22:11:02 +0000 (0:00:00.117)       0:06:48.491 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********\nSunday 01 March 2026  22:11:02 +0000 (0:00:00.498)       0:06:48.989 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:11:03 +0000 (0:00:00.695)       0:06:49.685 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:11:03 +0000 (0:00:00.129)       0:06:49.815 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:11:03 +0000 (0:00:00.133)       0:06:49.949 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:11:04 +0000 (0:00:00.762)       0:06:50.711 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:11:04 +0000 (0:00:00.188)       0:06:50.900 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:11:04 +0000 (0:00:00.180)       0:06:51.080 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:11:04 +0000 (0:00:00.201)       0:06:51.281 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:11:05 +0000 (0:00:00.897)       0:06:52.179 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026 
 22:11:06 +0000 (0:00:00.790)       0:06:52.970 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:11:06 +0000 (0:00:00.081)       0:06:53.052 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:11:06 +0000 (0:00:00.085)       0:06:53.138 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:11:06 +0000 (0:00:00.080)       0:06:53.218 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:11:06 +0000 (0:00:00.083)       0:06:53.301 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=key_manager)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:11:07 +0000 (0:00:00.154)       0:06:53.455 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:11:07 +0000 (0:00:00.118)       0:06:53.574 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:11:07 +0000 (0:00:00.109)       0:06:53.683 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.barbican : Deploy Helm chart] ************************\nSunday 01 March 2026  22:11:07 +0000 (0:00:00.641)       0:06:54.325 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 
2026  22:11:10 +0000 (0:00:02.076)       0:06:56.402 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:11:10 +0000 (0:00:00.167)       0:06:56.570 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:11:10 +0000 (0:00:00.073)       0:06:56.643 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:11:10 +0000 (0:00:00.077)       0:06:56.720 **********\nok: [instance]\n\nTASK [Create Ingress key-manager] **********************************************\nSunday 01 March 2026  22:11:10 +0000 (0:00:00.090)       0:06:56.811 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress key-manager] ****************\nSunday 01 March 2026  22:11:10 +0000 (0:00:00.123)       0:06:56.934 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.barbican : Create creator role] **********************\nSunday 01 March 2026  22:11:12 +0000 (0:00:01.489)       0:06:58.424 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.barbican : Add implied roles] ************************\nSunday 01 March 2026  22:11:13 +0000 (0:00:01.028)       0:06:59.452 **********\nok: [instance] => (item={'role': 'member', 'implies': 'creator'})\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:11:16 +0000 (0:00:03.826)       0:07:03.279 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:11:17 +0000 (0:00:00.120)       0:07:03.399 **********\nok: [instance]\n\nTASK 
[vexxhost.atmosphere.rook_ceph : Deploy Helm chart] ***********************\nSunday 01 March 2026  22:11:17 +0000 (0:00:00.640)       0:07:04.040 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:11:20 +0000 (0:00:02.711)       0:07:06.751 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:11:20 +0000 (0:00:00.082)       0:07:06.834 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:11:20 +0000 (0:00:00.067)       0:07:06.901 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:11:20 +0000 (0:00:00.080)       0:07:06.981 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:11:20 +0000 (0:00:00.077)       0:07:07.059 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:11:20 +0000 (0:00:00.070)       0:07:07.130 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:11:20 +0000 (0:00:00.088)       0:07:07.218 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:11:20 +0000 (0:00:00.089)       0:07:07.308 **********\nok: [instance] => (item=rook_ceph_cluster)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:11:21 +0000 (0:00:00.102)       0:07:07.410 **********\nok: [instance]\n\nTASK 
[vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:11:21 +0000 (0:00:00.111)       0:07:07.522 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:11:21 +0000 (0:00:00.109)       0:07:07.631 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Set mgr/cephadm/warn_on_stray_daemons to false] ***\nSunday 01 March 2026  22:11:21 +0000 (0:00:00.608)       0:07:08.239 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Collect \"ceph quorum_status\" output from a monitor] ***\nSunday 01 March 2026  22:11:36 +0000 (0:00:14.809)       0:07:23.049 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for client.admin] ***\nSunday 01 March 2026  22:11:51 +0000 (0:00:15.061)       0:07:38.111 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for monitors] ***\nSunday 01 March 2026  22:11:53 +0000 (0:00:01.643)       0:07:39.755 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Create Ceph cluster resource] ****\nSunday 01 March 2026  22:11:55 +0000 (0:00:01.631)       0:07:41.386 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Deploy Helm chart] ***************\nSunday 01 March 2026  22:11:55 +0000 (0:00:00.835)       0:07:42.221 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack user] ***********\nSunday 01 March 2026  22:11:57 +0000 (0:00:01.833)       0:07:44.055 **********\n[WARNING]: Module did not set no_log for update_password\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Grant access to \"service\" project] ***\nSunday 01 March 2026  22:11:58 +0000 (0:00:01.153) 
      0:07:45.208 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack service] ********\nSunday 01 March 2026  22:12:02 +0000 (0:00:03.924)       0:07:49.133 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack endpoints] ******\nSunday 01 March 2026  22:12:03 +0000 (0:00:01.069)       0:07:50.203 **********\nok: [instance] => (item={'interface': 'public', 'url': 'https://object-store.199-204-45-156.nip.io/swift/v1/%(tenant_id)s'})\nok: [instance] => (item={'interface': 'internal', 'url': 'http://rook-ceph-rgw-ceph.openstack.svc.cluster.local/swift/v1/%(tenant_id)s'})\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:12:05 +0000 (0:00:01.943)       0:07:52.146 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:12:06 +0000 (0:00:00.200)       0:07:52.346 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:12:06 +0000 (0:00:00.096)       0:07:52.443 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:12:06 +0000 (0:00:00.108)       0:07:52.552 **********\nok: [instance]\n\nTASK [Create Ingress rook-ceph-cluster] ****************************************\nSunday 01 March 2026  22:12:06 +0000 (0:00:00.101)       0:07:52.654 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress rook-ceph-cluster] **********\nSunday 01 March 2026  22:12:06 +0000 (0:00:00.166)       0:07:52.820 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:12:07 +0000 (0:00:01.237)       0:07:54.057 
**********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:12:07 +0000 (0:00:00.125)       0:07:54.182 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Collect \"ceph mon dump\" output from a monitor] ***\nSunday 01 March 2026  22:12:08 +0000 (0:00:00.648)       0:07:54.831 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Generate fact with list of Ceph monitors] ***\nSunday 01 March 2026  22:12:23 +0000 (0:00:14.926)       0:08:09.758 **********\nok: [instance] => (item=10.96.240.200)\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph service] *************\nSunday 01 March 2026  22:12:23 +0000 (0:00:00.117)       0:08:09.875 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph endpoints] ***********\nSunday 01 March 2026  22:12:24 +0000 (0:00:00.756)       0:08:10.631 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Retrieve client.admin keyring] ***\nSunday 01 March 2026  22:12:25 +0000 (0:00:00.800)       0:08:11.432 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Parse client.admin keyring] ******\nSunday 01 March 2026  22:12:26 +0000 (0:00:01.315)       0:08:12.748 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Create \"pvc-ceph-client-key\" secret] ***\nSunday 01 March 2026  22:12:26 +0000 (0:00:00.096)       0:08:12.845 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ceph_provisioners : Deploy Helm chart] ***************\nSunday 01 March 2026  22:12:27 +0000 (0:00:00.956)       0:08:13.801 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:12:29 +0000 (0:00:01.734)   
    0:08:15.536 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:12:29 +0000 (0:00:00.136)       0:08:15.672 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:12:29 +0000 (0:00:00.143)       0:08:15.816 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:12:30 +0000 (0:00:00.803)       0:08:16.619 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:12:30 +0000 (0:00:00.193)       0:08:16.813 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:12:30 +0000 (0:00:00.203)       0:08:17.016 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:12:30 +0000 (0:00:00.196)       0:08:17.213 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:12:31 +0000 (0:00:00.951)       0:08:18.164 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:12:32 +0000 (0:00:00.710)       0:08:18.874 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:12:32 +0000 (0:00:00.088)       0:08:18.963 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:12:32 +0000 (0:00:00.078)       0:08:19.041 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:12:32 +0000 (0:00:00.078)       0:08:19.120 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:12:32 +0000 (0:00:00.089)       0:08:19.209 **********\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=dashboard)\nok: [instance] => (item=image)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:12:33 +0000 (0:00:00.148)       0:08:19.358 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:12:33 +0000 (0:00:00.120)       0:08:19.478 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:12:33 +0000 (0:00:00.118)       0:08:19.597 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance : Deploy Helm chart] **************************\nSunday 01 March 2026  22:12:33 +0000 (0:00:00.653)       0:08:20.250 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:12:36 +0000 (0:00:02.531)       0:08:22.782 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:12:36 +0000 (0:00:00.171)       0:08:22.953 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:12:36 +0000 (0:00:00.087)       
0:08:23.041 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:12:36 +0000 (0:00:00.092)       0:08:23.133 **********\nok: [instance]\n\nTASK [Create Ingress image] ****************************************************\nSunday 01 March 2026  22:12:36 +0000 (0:00:00.098)       0:08:23.231 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress image] **********************\nSunday 01 March 2026  22:12:37 +0000 (0:00:00.307)       0:08:23.539 **********\nok: [instance]\n\nTASK [Create images] ***********************************************************\nSunday 01 March 2026  22:12:38 +0000 (0:00:01.192)       0:08:24.732 **********\nincluded: glance_image for instance => (item={'container_format': 'bare', 'disk_format': 'raw', 'is_public': True, 'min_disk': 1, 'name': 'cirros', 'url': 'http://download.cirros-cloud.net/0.6.2/cirros-0.6.2-x86_64-disk.img'})\n\nTASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************\nSunday 01 March 2026  22:12:38 +0000 (0:00:00.201)       0:08:24.933 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  22:12:39 +0000 (0:00:01.277)       0:08:26.211 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  22:12:40 +0000 (0:00:01.082)       0:08:27.293 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.069)       0:08:27.363 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.090)       0:08:27.453 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk 
format] ***\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.089)       0:08:27.543 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.087)       0:08:27.630 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.092)       0:08:27.722 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.085)       0:08:27.807 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.233)       0:08:28.041 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.111)       0:08:28.152 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.076)       0:08:28.229 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:12:41 +0000 (0:00:00.071)       0:08:28.300 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:12:42 +0000 (0:00:00.076)       0:08:28.377 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:12:42 +0000 (0:00:00.084)       0:08:28.461 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  
22:12:42 +0000 (0:00:00.090)       0:08:28.552 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:12:42 +0000 (0:00:00.090)       0:08:28.643 **********\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=staffeln)\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:12:42 +0000 (0:00:00.127)       0:08:28.770 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:12:42 +0000 (0:00:00.272)       0:08:29.043 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:12:42 +0000 (0:00:00.118)       0:08:29.161 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.staffeln : Deploy Helm chart] ************************\nSunday 01 March 2026  22:12:43 +0000 (0:00:00.609)       0:08:29.770 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:12:45 +0000 (0:00:02.004)       0:08:31.775 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:12:45 +0000 (0:00:00.129)       0:08:31.905 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:12:45 +0000 (0:00:00.148)       0:08:32.053 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:12:46 +0000 (0:00:00.778)       0:08:32.831 
**********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:12:46 +0000 (0:00:00.178)       0:08:33.009 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:12:46 +0000 (0:00:00.198)       0:08:33.208 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:12:47 +0000 (0:00:00.188)       0:08:33.396 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:12:47 +0000 (0:00:00.924)       0:08:34.321 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:12:48 +0000 (0:00:00.739)       0:08:35.061 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:12:48 +0000 (0:00:00.080)       0:08:35.141 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:12:48 +0000 (0:00:00.079)       0:08:35.221 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:12:48 +0000 (0:00:00.089)       0:08:35.311 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:12:49 +0000 (0:00:00.084)       0:08:35.395 **********\nok: [instance] => (item=volumev3)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=image)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:12:49 +0000 (0:00:00.165)       0:08:35.561 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:12:49 +0000 (0:00:00.126)       0:08:35.688 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:12:49 +0000 (0:00:00.120)       0:08:35.809 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cinder : Generate Helm values] ***********************\nSunday 01 March 2026  22:12:50 +0000 (0:00:00.625)       0:08:36.434 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.cinder : Append Helm values (Staffeln)] **************\nSunday 01 March 2026  22:12:50 +0000 (0:00:00.469)       0:08:36.903 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.cinder : Deploy Helm chart] **************************\nSunday 01 March 2026  22:12:50 +0000 (0:00:00.404)       0:08:37.308 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:12:53 +0000 (0:00:02.502)       0:08:39.811 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:12:53 +0000 (0:00:00.172)       0:08:39.984 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:12:53 +0000 (0:00:00.088)       0:08:40.072 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:12:53 +0000 (0:00:00.081)       0:08:40.154 **********\nok: 
[instance]\n\nTASK [Create Ingress volumev3] *************************************************\nSunday 01 March 2026  22:12:53 +0000 (0:00:00.091)       0:08:40.245 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress volumev3] *******************\nSunday 01 March 2026  22:12:54 +0000 (0:00:00.145)       0:08:40.390 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:12:55 +0000 (0:00:01.232)       0:08:41.623 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:12:55 +0000 (0:00:00.116)       0:08:41.739 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:12:55 +0000 (0:00:00.079)       0:08:41.819 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:12:55 +0000 (0:00:00.085)       0:08:41.904 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:12:55 +0000 (0:00:00.082)       0:08:41.986 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:12:55 +0000 (0:00:00.096)       0:08:42.083 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:12:55 +0000 (0:00:00.080)       0:08:42.164 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:12:55 +0000 (0:00:00.092)       0:08:42.256 **********\nok: [instance] => (item=identity)\nok: 
[instance] => (item=oslo_cache)\nok: [instance] => (item=placement)\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:12:56 +0000 (0:00:00.130)       0:08:42.386 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:12:56 +0000 (0:00:00.128)       0:08:42.514 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:12:56 +0000 (0:00:00.116)       0:08:42.631 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.placement : Deploy Helm chart] ***********************\nSunday 01 March 2026  22:12:56 +0000 (0:00:00.616)       0:08:43.248 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:12:59 +0000 (0:00:02.293)       0:08:45.541 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:12:59 +0000 (0:00:00.165)       0:08:45.706 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:12:59 +0000 (0:00:00.082)       0:08:45.789 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:12:59 +0000 (0:00:00.093)       0:08:45.883 **********\nok: [instance]\n\nTASK [Create Ingress placement] ************************************************\nSunday 01 March 2026  22:12:59 +0000 (0:00:00.101)       0:08:45.984 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress 
placement] ******************\nSunday 01 March 2026  22:12:59 +0000 (0:00:00.120)       0:08:46.104 **********\nok: [instance]\n\nPLAY [Configure operating system] **********************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:13:00 +0000 (0:00:01.151)       0:08:47.255 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Detect if the \"lpfc\" module is loaded] ********\nSunday 01 March 2026  22:13:04 +0000 (0:00:04.076)       0:08:51.332 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Install the configuration file] ***************\nSunday 01 March 2026  22:13:05 +0000 (0:00:00.241)       0:08:51.574 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Get the values for the module parameters] *****\nSunday 01 March 2026  22:13:05 +0000 (0:00:00.078)       0:08:51.653 **********\nskipping: [instance] => (item=lpfc_lun_queue_depth)\nskipping: [instance] => (item=lpfc_sg_seg_cnt)\nskipping: [instance] => (item=lpfc_max_luns)\nskipping: [instance] => (item=lpfc_enable_fc4_type)\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Detect if the run-time module parameters are set correctly] ***\nSunday 01 March 2026  22:13:05 +0000 (0:00:00.086)       0:08:51.740 **********\nskipping: [instance] => (item=lpfc_lun_queue_depth)\nskipping: [instance] => (item=lpfc_sg_seg_cnt)\nskipping: [instance] => (item=lpfc_max_luns)\nskipping: [instance] => (item=lpfc_enable_fc4_type)\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.lpfc : Update \"initramfs\" if the configuration file has changed] ***\nSunday 01 March 2026  22:13:05 +0000 (0:00:00.088)       0:08:51.828 **********\nskipping: [instance]\n\nTASK [Reboot the system if the configuration file has changed] *****************\nSunday 01 March 2026  22:13:05 +0000 (0:00:00.085)       0:08:51.914 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Add backports 
PPA] **********************\nSunday 01 March 2026  22:13:05 +0000 (0:00:00.099)       0:08:52.013 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********\nSunday 01 March 2026  22:13:06 +0000 (0:00:00.386)       0:08:52.400 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********\nSunday 01 March 2026  22:13:07 +0000 (0:00:01.319)       0:08:53.719 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.iscsi : Ensure iscsid is started] ********************\nSunday 01 March 2026  22:13:07 +0000 (0:00:00.569)       0:08:54.288 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.udev : Add udev rules for Pure Storage FlashArray] ***\nSunday 01 March 2026  22:13:08 +0000 (0:00:00.458)       0:08:54.746 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.udev : Add udev rules for SCSI Unit Attention] *******\nSunday 01 March 2026  22:13:08 +0000 (0:00:00.483)       0:08:55.230 **********\nok: [instance]\n\nPLAY [Deploy SDN] **************************************************************\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:13:09 +0000 (0:00:00.624)       0:08:55.855 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:13:09 +0000 (0:00:00.098)       0:08:55.954 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:13:09 +0000 (0:00:00.071)       0:08:56.025 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:13:09 +0000 (0:00:00.077)       0:08:56.102 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] 
***\nSunday 01 March 2026  22:13:09 +0000 (0:00:00.072)       0:08:56.175 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:13:09 +0000 (0:00:00.078)       0:08:56.253 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:13:09 +0000 (0:00:00.063)       0:08:56.317 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:13:10 +0000 (0:00:00.077)       0:08:56.394 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:13:10 +0000 (0:00:00.079)       0:08:56.474 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:13:10 +0000 (0:00:00.137)       0:08:56.611 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:13:10 +0000 (0:00:00.114)       0:08:56.726 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openvswitch : Get the current status of all systemd values for containerd] ***\nSunday 01 March 2026  22:13:10 +0000 (0:00:00.608)       0:08:57.334 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openvswitch : Assert that LimitMEMLOCK is set to infinity] ***\nSunday 01 March 2026  22:13:11 +0000 (0:00:00.270)       0:08:57.605 **********\nok: [instance] => {\n    \"changed\": false,\n    \"msg\": \"All assertions passed\"\n}\n\nTASK [vexxhost.atmosphere.openvswitch : Pull openvswitch image] ****************\nSunday 01 March 2026  22:13:11 +0000 (0:00:00.071)       
0:08:57.676 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openvswitch : Verify openvswitch image pull] *********\nSunday 01 March 2026  22:13:11 +0000 (0:00:00.528)       0:08:58.205 **********\nok: [instance] => {\n    \"changed\": false,\n    \"msg\": \"Successfully pulled openvswitch image\"\n}\n\nTASK [vexxhost.atmosphere.openvswitch : Deploy Helm chart] *********************\nSunday 01 March 2026  22:13:11 +0000 (0:00:00.072)       0:08:58.278 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:13:13 +0000 (0:00:01.731)       0:09:00.009 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:13:13 +0000 (0:00:00.302)       0:09:00.312 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Check if ovn_controller DaemonSet exists] ******\nSunday 01 March 2026  22:13:14 +0000 (0:00:00.617)       0:09:00.929 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Delete existing ovn controller DaemonSet if type label is found] ***\nSunday 01 March 2026  22:13:15 +0000 (0:00:00.703)       0:09:01.633 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Pull ovn-controller image] *********************\nSunday 01 March 2026  22:13:15 +0000 (0:00:00.084)       0:09:01.717 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.ovn : Verify ovn-controller image pull] **************\nSunday 01 March 2026  22:13:15 +0000 (0:00:00.531)       0:09:02.249 **********\nok: [instance] => {\n    \"changed\": false,\n    \"msg\": \"Successfully pulled ovn-controller image\"\n}\n\nTASK [vexxhost.atmosphere.ovn : Deploy Helm chart] *****************************\nSunday 01 March 2026  22:13:15 +0000 (0:00:00.083)       0:09:02.333 **********\nok: 
[instance]\n\nPLAY [controllers[0]] **********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:13:18 +0000 (0:00:02.081)       0:09:04.414 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:03.979)       0:09:08.394 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.099)       0:09:08.493 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.064)       0:09:08.558 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.078)       0:09:08.636 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.193)       0:09:08.829 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.075)       0:09:08.905 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.096)       0:09:09.001 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.094)       0:09:09.096 **********\nok: [instance] => (item=libvirt_exporter)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  
22:13:22 +0000 (0:00:00.093)       0:09:09.189 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:13:22 +0000 (0:00:00.137)       0:09:09.327 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:13:23 +0000 (0:00:00.137)       0:09:09.465 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.libvirt : Create CA certificates] ********************\nSunday 01 March 2026  22:13:23 +0000 (0:00:00.633)       0:09:10.098 **********\nok: [instance] => (item=libvirt-vnc)\nok: [instance] => (item=libvirt-api)\n\nTASK [vexxhost.atmosphere.libvirt : Create Issuers] ****************************\nSunday 01 March 2026  22:13:25 +0000 (0:00:01.405)       0:09:11.504 **********\nok: [instance] => (item=libvirt-vnc)\nok: [instance] => (item=libvirt-api)\n\nTASK [vexxhost.atmosphere.libvirt : Deploy Helm chart] *************************\nSunday 01 March 2026  22:13:26 +0000 (0:00:01.401)       0:09:12.905 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:13:28 +0000 (0:00:02.045)       0:09:14.950 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:13:28 +0000 (0:00:00.113)       0:09:15.064 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.coredns : Deploy Helm chart] *************************\nSunday 01 March 2026  22:13:29 +0000 (0:00:00.583)       0:09:15.647 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] 
*****************\nSunday 01 March 2026  22:13:29 +0000 (0:00:00.162)       0:09:15.810 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****\nSunday 01 March 2026  22:13:30 +0000 (0:00:00.993)       0:09:16.803 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********\nSunday 01 March 2026  22:13:30 +0000 (0:00:00.248)       0:09:17.052 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:13:31 +0000 (0:00:00.607)       0:09:17.660 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:13:31 +0000 (0:00:00.280)       0:09:17.940 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:13:31 +0000 (0:00:00.267)       0:09:18.208 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:13:32 +0000 (0:00:00.721)       0:09:18.929 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:13:32 +0000 (0:00:00.205)       0:09:19.135 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:13:33 +0000 (0:00:00.216)       0:09:19.351 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:13:33 +0000 (0:00:00.180)       0:09:19.532 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:13:34 +0000 (0:00:00.981)       0:09:20.513 **********\nok: 
[instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:13:34 +0000 (0:00:00.696)       0:09:21.209 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:13:34 +0000 (0:00:00.089)       0:09:21.298 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:13:35 +0000 (0:00:00.082)       0:09:21.381 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:13:35 +0000 (0:00:00.080)       0:09:21.462 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:13:35 +0000 (0:00:00.074)       0:09:21.536 **********\nok: [instance] => (item=volumev3)\nok: [instance] => (item=baremetal)\nok: [instance] => (item=compute_novnc_proxy)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=placement)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=compute)\nok: [instance] => (item=oslo_db_api)\nok: [instance] => (item=image)\nok: [instance] => (item=network)\nok: [instance] => (item=identity)\nok: [instance] => (item=compute_metadata)\nok: [instance] => (item=oslo_db_cell0)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:13:35 +0000 (0:00:00.260)       0:09:21.797 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:13:35 +0000 (0:00:00.126)       0:09:21.923 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml 
for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:13:35 +0000 (0:00:00.129)       0:09:22.053 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.nova : Generate temporary file for SSH public key] ***\nSunday 01 March 2026  22:13:36 +0000 (0:00:00.636)       0:09:22.689 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.nova : Write contents of current private SSH key] ****\nSunday 01 March 2026  22:13:36 +0000 (0:00:00.434)       0:09:23.123 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.nova : Generate public key for SSH private key] ******\nSunday 01 March 2026  22:13:37 +0000 (0:00:00.648)       0:09:23.772 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.nova : Delete temporary file for public SSH key] *****\nSunday 01 March 2026  22:13:37 +0000 (0:00:00.546)       0:09:24.318 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.nova : Remove nova-bootstrap and nova-cell-setup job] ***\nSunday 01 March 2026  22:13:38 +0000 (0:00:00.230)       0:09:24.549 **********\nok: [instance] => (item=nova-bootstrap)\nok: [instance] => (item=nova-cell-setup)\n\nTASK [vexxhost.atmosphere.nova : Deploy Helm chart] ****************************\nSunday 01 March 2026  22:13:39 +0000 (0:00:01.428)       0:09:25.978 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:13:43 +0000 (0:00:03.751)       0:09:29.730 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:13:43 +0000 (0:00:00.404)       0:09:30.135 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:13:43 +0000 (0:00:00.080)       0:09:30.215 **********\nskipping: 
[instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:13:43 +0000 (0:00:00.083)       0:09:30.299 **********\nok: [instance]\n\nTASK [Create Ingress compute] **************************************************\nSunday 01 March 2026  22:13:44 +0000 (0:00:00.098)       0:09:30.398 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress compute] ********************\nSunday 01 March 2026  22:13:44 +0000 (0:00:00.127)       0:09:30.526 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:13:45 +0000 (0:00:01.188)       0:09:31.714 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:13:45 +0000 (0:00:00.180)       0:09:31.894 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:13:45 +0000 (0:00:00.085)       0:09:31.980 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:13:45 +0000 (0:00:00.089)       0:09:32.069 **********\nok: [instance]\n\nTASK [Create Ingress compute-novnc-proxy] **************************************\nSunday 01 March 2026  22:13:45 +0000 (0:00:00.105)       0:09:32.174 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress compute-novnc-proxy] ********\nSunday 01 March 2026  22:13:45 +0000 (0:00:00.130)       0:09:32.305 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.nova : Wait until compute api service ready] *********\nSunday 01 March 2026  22:13:47 +0000 (0:00:01.360)       0:09:33.665 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.nova : Create flavors] 
*******************************\nSunday 01 March 2026  22:13:48 +0000 (0:00:00.802)       0:09:34.468 **********\nok: [instance] => (item={'disk': 1, 'name': 'm1.tiny', 'ram': 512, 'vcpus': 1})\nok: [instance] => (item={'disk': 20, 'name': 'm1.small', 'ram': 2048, 'vcpus': 1})\nok: [instance] => (item={'disk': 40, 'name': 'm1.medium', 'ram': 4096, 'vcpus': 2})\nok: [instance] => (item={'disk': 80, 'name': 'm1.large', 'ram': 8192, 'vcpus': 4})\nok: [instance] => (item={'disk': 160, 'name': 'm1.xlarge', 'ram': 16384, 'vcpus': 8})\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:13:53 +0000 (0:00:05.473)       0:09:39.941 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:13:53 +0000 (0:00:00.307)       0:09:40.249 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:13:54 +0000 (0:00:00.137)       0:09:40.386 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:13:54 +0000 (0:00:00.737)       0:09:41.123 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:13:55 +0000 (0:00:00.374)       0:09:41.497 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:13:55 +0000 (0:00:00.401)       0:09:41.899 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:13:55 +0000 (0:00:00.200)       0:09:42.099 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:13:57 +0000 
(0:00:01.264)       0:09:43.363 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:13:57 +0000 (0:00:00.752)       0:09:44.116 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:13:57 +0000 (0:00:00.085)       0:09:44.201 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:13:57 +0000 (0:00:00.088)       0:09:44.289 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:13:58 +0000 (0:00:00.085)       0:09:44.375 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:13:58 +0000 (0:00:00.085)       0:09:44.461 **********\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=compute)\nok: [instance] => (item=compute_metadata)\nok: [instance] => (item=identity)\nok: [instance] => (item=network)\nok: [instance] => (item=dns)\nok: [instance] => (item=baremetal)\nok: [instance] => (item=load_balancer)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:13:58 +0000 (0:00:00.190)       0:09:44.652 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:13:58 +0000 (0:00:00.128)       0:09:44.780 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 
March 2026  22:13:58 +0000 (0:00:00.123)       0:09:44.904 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Set external_dns_driver] *******************\nSunday 01 March 2026  22:13:59 +0000 (0:00:00.634)       0:09:45.539 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Generate Helm values] **********************\nSunday 01 March 2026  22:13:59 +0000 (0:00:00.082)       0:09:45.621 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Append Helm values] ************************\nSunday 01 March 2026  22:13:59 +0000 (0:00:00.348)       0:09:45.970 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Append Helm values (neutron_policy_server)] ***\nSunday 01 March 2026  22:13:59 +0000 (0:00:00.100)       0:09:46.070 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Deploy Helm chart] *************************\nSunday 01 March 2026  22:13:59 +0000 (0:00:00.103)       0:09:46.174 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:14:02 +0000 (0:00:02.430)       0:09:48.604 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:14:02 +0000 (0:00:00.184)       0:09:48.789 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:14:02 +0000 (0:00:00.083)       0:09:48.872 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:14:02 +0000 (0:00:00.095)       0:09:48.968 **********\nok: [instance]\n\nTASK [Create Ingress network] **************************************************\nSunday 01 March 2026  22:14:02 +0000 (0:00:00.096)       0:09:49.064 **********\nincluded: ingress for instance\n\nTASK 
[vexxhost.atmosphere.ingress : Create Ingress network] ********************\nSunday 01 March 2026  22:14:02 +0000 (0:00:00.134)       0:09:49.199 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Wait until network service ready] **********\nSunday 01 March 2026  22:14:04 +0000 (0:00:01.257)       0:09:50.457 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.neutron : Create networks] ***************************\nSunday 01 March 2026  22:14:05 +0000 (0:00:00.891)       0:09:51.348 **********\nok: [instance] => (item={'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True, 'subnets': [{'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}]})\n\nTASK [vexxhost.atmosphere.neutron : Create subnets] ****************************\nSunday 01 March 2026  22:14:06 +0000 (0:00:01.130)       0:09:52.479 **********\nok: [instance] => (item=[{'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True}, {'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}])\n\nTASK [vexxhost.atmosphere.senlin : Remove OpenStack endpoints] *****************\nSunday 01 March 2026  22:14:07 +0000 (0:00:01.275)       0:09:53.754 **********\nok: [instance] => (item=public)\nok: [instance] => (item=admin)\nok: [instance] => (item=internal)\n\nTASK [vexxhost.atmosphere.senlin : Remove OpenStack service] *******************\nSunday 01 March 2026  22:14:10 +0000 (0:00:02.649)       0:09:56.404 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.senlin : Remove Ingress] 
*****************************\nSunday 01 March 2026  22:14:10 +0000 (0:00:00.897)       0:09:57.301 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.senlin : Remove Helm chart] **************************\nSunday 01 March 2026  22:14:11 +0000 (0:00:00.756)       0:09:58.058 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.senlin : Remove OpenStack user] **********************\nSunday 01 March 2026  22:14:12 +0000 (0:00:00.626)       0:09:58.684 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:14:13 +0000 (0:00:01.245)       0:09:59.929 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:14:13 +0000 (0:00:00.112)       0:10:00.041 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:14:13 +0000 (0:00:00.149)       0:10:00.190 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:14:14 +0000 (0:00:00.737)       0:10:00.928 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:14:14 +0000 (0:00:00.179)       0:10:01.107 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:14:14 +0000 (0:00:00.178)       0:10:01.285 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:14:15 +0000 (0:00:00.178)       0:10:01.464 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:14:16 +0000 (0:00:00.924)       0:10:02.388 **********\nok: 
[instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:14:16 +0000 (0:00:00.721)       0:10:03.110 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:14:16 +0000 (0:00:00.088)       0:10:03.198 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:14:16 +0000 (0:00:00.086)       0:10:03.285 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:14:17 +0000 (0:00:00.095)       0:10:03.380 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:14:17 +0000 (0:00:00.089)       0:10:03.469 **********\nok: [instance] => (item=orchestration)\nok: [instance] => (item=cloudformation)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:14:17 +0000 (0:00:00.139)       0:10:03.609 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:14:17 +0000 (0:00:00.113)       0:10:03.722 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:14:17 +0000 (0:00:00.120)       0:10:03.843 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.heat : Deploy Helm chart] ****************************\nSunday 01 March 
2026  22:14:18 +0000 (0:00:00.648)       0:10:04.491 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:14:20 +0000 (0:00:02.788)       0:10:07.280 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:14:21 +0000 (0:00:00.275)       0:10:07.555 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:14:21 +0000 (0:00:00.072)       0:10:07.628 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:14:21 +0000 (0:00:00.098)       0:10:07.726 **********\nok: [instance]\n\nTASK [Create Ingress orchestration] ********************************************\nSunday 01 March 2026  22:14:21 +0000 (0:00:00.093)       0:10:07.819 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress orchestration] **************\nSunday 01 March 2026  22:14:21 +0000 (0:00:00.132)       0:10:07.952 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:14:22 +0000 (0:00:01.117)       0:10:09.069 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:14:22 +0000 (0:00:00.170)       0:10:09.240 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:14:22 +0000 (0:00:00.074)       0:10:09.314 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:14:23 +0000 (0:00:00.094)       0:10:09.409 
**********\nok: [instance]\n\nTASK [Create Ingress cloudformation] *******************************************\nSunday 01 March 2026  22:14:23 +0000 (0:00:00.090)       0:10:09.500 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress cloudformation] *************\nSunday 01 March 2026  22:14:23 +0000 (0:00:00.127)       0:10:09.627 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********\nSunday 01 March 2026  22:14:24 +0000 (0:00:01.379)       0:10:11.007 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:14:24 +0000 (0:00:00.305)       0:10:11.313 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:14:26 +0000 (0:00:01.293)       0:10:12.606 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:14:26 +0000 (0:00:00.324)       0:10:12.931 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:14:26 +0000 (0:00:00.129)       0:10:13.060 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:14:27 +0000 (0:00:00.582)       0:10:13.643 **********\nok: [instance]\n\nTASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************\nSunday 01 March 2026  22:14:28 +0000 (0:00:01.399)       0:10:15.043 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***\nSunday 01 March 2026  22:14:29 +0000 (0:00:00.513)       0:10:15.556 **********\nok: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***\nSunday 01 March 2026  22:14:30 +0000 (0:00:00.821)       0:10:16.378 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***\nSunday 01 March 2026  22:14:30 +0000 (0:00:00.781)       0:10:17.159 **********\nok: [instance]\n\nTASK [Generate OpenStack-Helm endpoints] ***************************************\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.410)       0:10:17.570 **********\nincluded: openstack_helm_endpoints for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.201)       0:10:17.772 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.100)       0:10:17.872 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.088)       0:10:17.960 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.088)       0:10:18.049 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.079)       0:10:18.129 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.088)       0:10:18.217 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:14:31 +0000 (0:00:00.089)       0:10:18.307 **********\nok: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:14:32 +0000 (0:00:00.090)       0:10:18.397 **********\nok: [instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:14:32 +0000 (0:00:00.090)       0:10:18.487 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************\nSunday 01 March 2026  22:14:32 +0000 (0:00:00.117)       0:10:18.605 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********\nSunday 01 March 2026  22:14:32 +0000 (0:00:00.506)       0:10:19.111 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:14:33 +0000 (0:00:00.631)       0:10:19.742 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:14:33 +0000 (0:00:00.119)       0:10:19.862 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:14:33 +0000 (0:00:00.139)       0:10:20.002 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:14:34 +0000 (0:00:00.799)       0:10:20.801 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:14:34 +0000 (0:00:00.209)       0:10:21.011 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:14:34 +0000 (0:00:00.202)       0:10:21.213 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 
2026  22:14:35 +0000 (0:00:00.384)       0:10:21.597 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:14:36 +0000 (0:00:01.230)       0:10:22.828 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:14:37 +0000 (0:00:00.782)       0:10:23.610 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:14:37 +0000 (0:00:00.094)       0:10:23.705 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:14:37 +0000 (0:00:00.094)       0:10:23.799 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:14:37 +0000 (0:00:00.081)       0:10:23.881 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:14:37 +0000 (0:00:00.070)       0:10:23.951 **********\nok: [instance] => (item=valkey)\nok: [instance] => (item=oslo_db_persistence)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=network)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\nok: [instance] => (item=load_balancer)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:14:37 +0000 (0:00:00.168)       0:10:24.120 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:14:37 +0000 (0:00:00.121)       0:10:24.241 **********\nincluded: 
/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:14:38 +0000 (0:00:00.119)       0:10:24.360 **********\nok: [instance]\n\nTASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****\nSunday 01 March 2026  22:14:38 +0000 (0:00:00.630)       0:10:24.991 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create management network] *****************\nSunday 01 March 2026  22:14:40 +0000 (0:00:01.502)       0:10:26.494 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create management subnet] ******************\nSunday 01 March 2026  22:14:41 +0000 (0:00:01.045)       0:10:27.540 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create health manager security group] ******\nSunday 01 March 2026  22:14:42 +0000 (0:00:01.058)       0:10:28.598 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create health manager security group rules] ***\nSunday 01 March 2026  22:14:43 +0000 (0:00:01.281)       0:10:29.880 **********\nok: [instance] => (item={'protocol': 'udp', 'port': 5555})\nok: [instance] => (item={'protocol': 'udp', 'port': 10514})\nok: [instance] => (item={'protocol': 'udp', 'port': 20514})\nok: [instance] => (item={'protocol': 'tcp', 'port': 10514})\nok: [instance] => (item={'protocol': 'tcp', 'port': 20514})\n\nTASK [vexxhost.atmosphere.octavia : Create health manager networking ports] ****\nSunday 01 March 2026  22:14:48 +0000 (0:00:05.241)       0:10:35.122 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Discover facts for other controllers] ******\nSunday 01 March 2026  22:14:50 +0000 (0:00:01.444)       0:10:36.567 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Set binding for ports] *********************\nSunday 
01 March 2026  22:14:53 +0000 (0:00:03.177)       0:10:39.744 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Get health manager networking ports] *******\nSunday 01 March 2026  22:14:57 +0000 (0:00:03.892)       0:10:43.636 **********\nok: [instance] => (item=instance)\n\nTASK [vexxhost.atmosphere.octavia : Set controller_ip_port_list] ***************\nSunday 01 March 2026  22:14:58 +0000 (0:00:01.165)       0:10:44.802 **********\nok: [instance] => (item=octavia-health-manager-port-instance)\n\nTASK [vexxhost.atmosphere.octavia : Create amphora security group] *************\nSunday 01 March 2026  22:14:58 +0000 (0:00:00.110)       0:10:44.912 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create amphora security group rules] *******\nSunday 01 March 2026  22:14:59 +0000 (0:00:00.948)       0:10:45.860 **********\nok: [instance] => (item=[22, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 'numa_affinity_policy': None, 'project_id': 
'b6357918f73b458f9e03a5667d465a0e', 'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'ACTIVE', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:53:57Z', 'revision_number': 4, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': None}}, 'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])\nok: [instance] => (item=[9443, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 
'numa_affinity_policy': None, 'project_id': 'b6357918f73b458f9e03a5667d465a0e', 'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'ACTIVE', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:53:57Z', 'revision_number': 4, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': None}}, 'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])\n\nTASK [vexxhost.atmosphere.octavia : Create amphora flavor] *********************\nSunday 01 March 2026  22:15:01 +0000 (0:00:02.143)       0:10:48.003 **********\nok: [instance]\n\nTASK [Upload Amphora image] ****************************************************\nSunday 01 March 2026  22:15:02 +0000 (0:00:01.028)       0:10:49.032 **********\nincluded: glance_image for instance\n\nTASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************\nSunday 01 March 2026  22:15:02 +0000 (0:00:00.212)       0:10:49.245 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  22:15:04 +0000 (0:00:01.432)       0:10:50.678 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  22:15:05 +0000 (0:00:01.017)       0:10:51.695 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 
01 March 2026  22:15:05 +0000 (0:00:00.086)       0:10:51.781 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  22:15:05 +0000 (0:00:00.085)       0:10:51.867 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***\nSunday 01 March 2026  22:15:05 +0000 (0:00:00.081)       0:10:51.949 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  22:15:05 +0000 (0:00:00.083)       0:10:52.033 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  22:15:05 +0000 (0:00:00.075)       0:10:52.108 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  22:15:05 +0000 (0:00:00.078)       0:10:52.186 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Get Amphora image information] *************\nSunday 01 March 2026  22:15:05 +0000 (0:00:00.124)       0:10:52.311 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create Amphora SSH key] ********************\nSunday 01 March 2026  22:15:06 +0000 (0:00:01.007)       0:10:53.319 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Grab generated Amphora public key] *********\nSunday 01 March 2026  22:15:07 +0000 (0:00:00.807)       0:10:54.127 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Import Amphora SSH key-pair in OpenStack] ***\nSunday 01 March 2026  22:15:08 +0000 (0:00:00.690)       0:10:54.817 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create CAs & Issuers] **********************\nSunday 01 March 2026  22:15:09 +0000 (0:00:01.279)       0:10:56.096 **********\nok: [instance] => (item=octavia-client)\nok: [instance] => 
(item=octavia-server)\n\nTASK [vexxhost.atmosphere.octavia : Create certificate for Octavia clients] ****\nSunday 01 March 2026  22:15:11 +0000 (0:00:01.484)       0:10:57.581 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Create admin compute quotaset] *************\nSunday 01 March 2026  22:15:12 +0000 (0:00:01.117)       0:10:58.699 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Deploy Helm chart] *************************\nSunday 01 March 2026  22:15:13 +0000 (0:00:01.490)       0:11:00.189 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.octavia : Add implied roles] *************************\nSunday 01 March 2026  22:15:16 +0000 (0:00:02.376)       0:11:02.566 **********\nok: [instance] => (item={'role': 'member', 'implies': 'load-balancer_member'})\nok: [instance] => (item={'role': 'reader', 'implies': 'load-balancer_observer'})\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:15:23 +0000 (0:00:07.617)       0:11:10.184 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:15:24 +0000 (0:00:00.195)       0:11:10.379 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:15:24 +0000 (0:00:00.097)       0:11:10.477 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:15:24 +0000 (0:00:00.077)       0:11:10.554 **********\nok: [instance]\n\nTASK [Create Ingress load-balancer] ********************************************\nSunday 01 March 2026  22:15:24 +0000 (0:00:00.093)       0:11:10.648 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress load-balancer] **************\nSunday 01 March 2026  22:15:24 +0000 
(0:00:00.129)       0:11:10.777 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:15:25 +0000 (0:00:01.266)       0:11:12.044 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:15:25 +0000 (0:00:00.121)       0:11:12.166 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:15:25 +0000 (0:00:00.142)       0:11:12.308 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:15:26 +0000 (0:00:00.726)       0:11:13.035 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:15:26 +0000 (0:00:00.204)       0:11:13.239 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:15:27 +0000 (0:00:00.219)       0:11:13.458 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:15:27 +0000 (0:00:00.201)       0:11:13.660 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:15:28 +0000 (0:00:00.874)       0:11:14.534 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:15:28 +0000 (0:00:00.737)       0:11:15.272 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:15:29 +0000 (0:00:00.085)       0:11:15.357 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:15:29 +0000 (0:00:00.097)       0:11:15.455 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:15:29 +0000 (0:00:00.073)       0:11:15.528 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:15:29 +0000 (0:00:00.214)       0:11:15.742 **********\nok: [instance] => (item=orchestration)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=key_manager)\nok: [instance] => (item=container_infra)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:15:29 +0000 (0:00:00.173)       0:11:15.916 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:15:29 +0000 (0:00:00.121)       0:11:16.037 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:15:29 +0000 (0:00:00.120)       0:11:16.158 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:15:30 +0000 (0:00:00.626)       0:11:16.784 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/kubernetes-sigs/cluster-api/releases/download/v1.10.5/clusterctl-linux-amd64\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:15:30 +0000 (0:00:00.098)       0:11:16.882 **********\nok: 
[instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:15:30 +0000 (0:00:00.425)       0:11:17.308 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.clusterctl : Create a configuration file] ************\nSunday 01 March 2026  22:15:31 +0000 (0:00:00.121)       0:11:17.430 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:15:31 +0000 (0:00:00.634)       0:11:18.064 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:15:31 +0000 (0:00:00.143)       0:11:18.207 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Create build directory] ***\nSunday 01 March 2026  22:15:32 +0000 (0:00:00.618)       0:11:18.826 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Upload Kustomization] ***\nSunday 01 March 2026  22:15:32 +0000 (0:00:00.255)       0:11:19.081 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Generate manifests] ***\nSunday 01 March 2026  22:15:33 +0000 (0:00:00.568)       0:11:19.649 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.openstack_resource_controller : Apply manifest to cluster] ***\nSunday 01 March 2026  22:15:34 +0000 (0:00:01.030)       0:11:20.680 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Create a folder for the Cluster API providers] ***\nSunday 01 March 2026  22:15:37 +0000 (0:00:02.729)       0:11:23.409 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Copy over all provider configuration to the remote system] ***\nSunday 01 March 2026  22:15:37 +0000 (0:00:00.262)       0:11:23.672 **********\nok: 
[instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Get a list of all Cluster API providers] ***\nSunday 01 March 2026  22:15:39 +0000 (0:00:02.110)       0:11:25.782 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Initialize the management cluster] *****\nSunday 01 March 2026  22:15:40 +0000 (0:00:00.727)       0:11:26.509 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Parse provider resources into version mapping] ***\nSunday 01 March 2026  22:15:40 +0000 (0:00:00.073)       0:11:26.583 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Run upgrade if required] ***************\nSunday 01 March 2026  22:15:40 +0000 (0:00:00.100)       0:11:26.683 **********\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Set node selector for Cluster API components] ***\nSunday 01 March 2026  22:15:40 +0000 (0:00:00.077)       0:11:26.760 **********\nok: [instance] => (item={'namespace': 'capi-kubeadm-bootstrap-system', 'name': 'capi-kubeadm-bootstrap-controller-manager'})\nok: [instance] => (item={'namespace': 'capi-kubeadm-control-plane-system', 'name': 'capi-kubeadm-control-plane-controller-manager'})\nok: [instance] => (item={'namespace': 'capi-system', 'name': 'capi-controller-manager'})\nok: [instance] => (item={'namespace': 'capo-system', 'name': 'capo-controller-manager'})\n\nTASK [vexxhost.kubernetes.cluster_api : Set default values for imagePullPolicy in kubeadmConfigSpec of CRDs] ***\nSunday 01 March 2026  22:15:43 +0000 (0:00:02.879)       0:11:29.640 **********\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 
'path': '/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})\nskipping: [instance]\n\nTASK [vexxhost.kubernetes.cluster_api : Set CAPO instance creation timeout] ****\nSunday 01 March 2026  22:15:43 +0000 
(0:00:00.117)       0:11:29.757 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy Cluster API for Magnum RBAC] *********\nSunday 01 March 2026  22:15:44 +0000 (0:00:01.192)       0:11:30.950 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy Helm chart] **************************\nSunday 01 March 2026  22:15:45 +0000 (0:00:00.789)       0:11:31.740 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy \"magnum-cluster-api-proxy\"] **********\nSunday 01 March 2026  22:15:47 +0000 (0:00:02.268)       0:11:34.008 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:15:48 +0000 (0:00:00.797)       0:11:34.806 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:15:48 +0000 (0:00:00.170)       0:11:34.977 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:15:48 +0000 (0:00:00.092)       0:11:35.070 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:15:48 +0000 (0:00:00.089)       0:11:35.159 **********\nok: [instance]\n\nTASK [Create Ingress container-infra] ******************************************\nSunday 01 March 2026  22:15:48 +0000 (0:00:00.095)       0:11:35.254 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress container-infra] ************\nSunday 01 March 2026  22:15:49 +0000 (0:00:00.141)       0:11:35.395 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.magnum : Deploy magnum registry] *********************\nSunday 01 March 2026  22:15:50 +0000 (0:00:01.387)       0:11:36.783 **********\nok: [instance]\n\nTASK [Create magnum registry Ingress] 
******************************************\nSunday 01 March 2026  22:15:51 +0000 (0:00:00.828)       0:11:37.612 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:15:51 +0000 (0:00:00.177)       0:11:37.789 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:15:51 +0000 (0:00:00.070)       0:11:37.860 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:15:51 +0000 (0:00:00.072)       0:11:37.932 **********\nok: [instance]\n\nTASK [Create Ingress container-infra-registry] *********************************\nSunday 01 March 2026  22:15:51 +0000 (0:00:00.088)       0:11:38.021 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress container-infra-registry] ***\nSunday 01 March 2026  22:15:51 +0000 (0:00:00.283)       0:11:38.304 **********\nok: [instance]\n\nTASK [Upload images] ***********************************************************\nSunday 01 March 2026  22:15:53 +0000 (0:00:01.158)       0:11:39.463 **********\nincluded: glance_image for instance => (item={'name': 'ubuntu-2204-kube-v1.34.3', 'url': 'https://github.com/vexxhost/capo-image-elements/releases/download/2025.12-3/ubuntu-22.04-v1.34.3.qcow2', 'distro': 'ubuntu'})\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  22:15:53 +0000 (0:00:00.226)       0:11:39.690 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  22:15:54 +0000 (0:00:00.944)       0:11:40.634 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 01 March 2026  22:15:54 +0000 
(0:00:00.074)       0:11:40.708 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  22:15:54 +0000 (0:00:00.098)       0:11:40.807 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***\nSunday 01 March 2026  22:15:54 +0000 (0:00:00.076)       0:11:40.884 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  22:15:54 +0000 (0:00:00.082)       0:11:40.966 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  22:15:54 +0000 (0:00:00.079)       0:11:41.046 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  22:15:54 +0000 (0:00:00.091)       0:11:41.137 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:15:55 +0000 (0:00:00.243)       0:11:41.380 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:15:55 +0000 (0:00:00.126)       0:11:41.507 **********\nincluded: rabbitmq for instance\n\nTASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***\nSunday 01 March 2026  22:15:55 +0000 (0:00:00.130)       0:11:41.638 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******\nSunday 01 March 2026  22:15:56 +0000 (0:00:00.701)       0:11:42.340 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***\nSunday 01 March 2026  22:15:56 +0000 (0:00:00.176)       0:11:42.516 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****\nSunday 01 March 2026  22:15:56 +0000 (0:00:00.181)       0:11:42.697 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************\nSunday 01 March 2026  22:15:56 +0000 (0:00:00.204)       0:11:42.902 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:15:57 +0000 (0:00:00.859)       0:11:43.762 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:15:58 +0000 (0:00:00.703)       0:11:44.465 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:15:58 +0000 (0:00:00.080)       0:11:44.546 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:15:58 +0000 (0:00:00.080)       0:11:44.626 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:15:58 +0000 (0:00:00.074)       0:11:44.701 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:15:58 +0000 (0:00:00.076)       0:11:44.778 **********\nok: [instance] => (item=sharev2)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\nok: [instance] => (item=identity)\nok: [instance] => (item=oslo_messaging)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:15:58 +0000 (0:00:00.124)       0:11:44.902 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 
2026  22:15:58 +0000 (0:00:00.288)       0:11:45.191 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:15:58 +0000 (0:00:00.106)       0:11:45.298 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.manila : Create flavor] ******************************\nSunday 01 March 2026  22:15:59 +0000 (0:00:00.577)       0:11:45.875 **********\nok: [instance]\n\nTASK [Upload service image] ****************************************************\nSunday 01 March 2026  22:16:00 +0000 (0:00:01.011)       0:11:46.887 **********\nincluded: glance_image for instance\n\nTASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************\nSunday 01 March 2026  22:16:00 +0000 (0:00:00.189)       0:11:47.076 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****\nSunday 01 March 2026  22:16:01 +0000 (0:00:00.943)       0:11:48.020 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Download image] ***********************\nSunday 01 March 2026  22:16:01 +0000 (0:00:00.078)       0:11:48.098 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Get image format] *********************\nSunday 01 March 2026  22:16:01 +0000 (0:00:00.088)       0:11:48.187 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***\nSunday 01 March 2026  22:16:01 +0000 (0:00:00.077)       0:11:48.264 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******\nSunday 01 March 2026  22:16:02 +0000 (0:00:00.075)       0:11:48.340 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************\nSunday 01 March 2026  
22:16:02 +0000 (0:00:00.082)       0:11:48.422 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************\nSunday 01 March 2026  22:16:02 +0000 (0:00:00.076)       0:11:48.498 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.manila : Create generic share driver security group] ***\nSunday 01 March 2026  22:16:02 +0000 (0:00:00.133)       0:11:48.631 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.manila : Create generic share driver security group tcp rules] ***\nSunday 01 March 2026  22:16:03 +0000 (0:00:01.056)       0:11:49.688 **********\nok: [instance] => (item=22)\nok: [instance] => (item=111)\nok: [instance] => (item=2049)\n\nTASK [vexxhost.atmosphere.manila : Create generic share driver security group icmp rules] ***\nSunday 01 March 2026  22:16:06 +0000 (0:00:03.238)       0:11:52.926 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.manila : Generate temporary file for SSH public key] ***\nSunday 01 March 2026  22:16:07 +0000 (0:00:01.176)       0:11:54.102 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Write contents of current private SSH key] ***\nSunday 01 March 2026  22:16:07 +0000 (0:00:00.228)       0:11:54.331 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Generate public key for SSH private key] ****\nSunday 01 March 2026  22:16:08 +0000 (0:00:00.398)       0:11:54.729 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Delete temporary file for public SSH key] ***\nSunday 01 March 2026  22:16:08 +0000 (0:00:00.327)       0:11:55.057 **********\nok: [instance -> localhost]\n\nTASK [vexxhost.atmosphere.manila : Create secret with the SSH keys] ************\nSunday 01 March 2026  22:16:08 +0000 (0:00:00.223)       0:11:55.280 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.manila : Deploy Helm chart] **************************\nSunday 01 March 2026  22:16:09 +0000 
(0:00:00.767)       0:11:56.048 **********\nok: [instance]\n\nTASK [Create Ingress] **********************************************************\nSunday 01 March 2026  22:16:11 +0000 (0:00:02.233)       0:11:58.282 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:16:12 +0000 (0:00:00.196)       0:11:58.479 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:16:12 +0000 (0:00:00.085)       0:11:58.564 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:16:12 +0000 (0:00:00.265)       0:11:58.829 **********\nok: [instance]\n\nTASK [Create Ingress sharev2] **************************************************\nSunday 01 March 2026  22:16:12 +0000 (0:00:00.085)       0:11:58.915 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress sharev2] ********************\nSunday 01 March 2026  22:16:12 +0000 (0:00:00.128)       0:11:59.043 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.manila : Update service tenant quotas] ***************\nSunday 01 March 2026  22:16:13 +0000 (0:00:01.146)       0:12:00.190 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:16:15 +0000 (0:00:01.375)       0:12:01.565 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.102)       0:12:01.668 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.075)       0:12:01.743 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.079)       0:12:01.822 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.083)       0:12:01.906 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.082)       0:12:01.989 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.075)       0:12:02.064 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.069)       0:12:02.133 **********\nok: [instance] => (item=identity)\nok: [instance] => (item=dashboard)\nok: [instance] => (item=oslo_cache)\nok: [instance] => (item=oslo_db)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:16:15 +0000 (0:00:00.099)       0:12:02.233 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:16:16 +0000 (0:00:00.109)       0:12:02.342 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:16:16 +0000 (0:00:00.128)       0:12:02.471 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.horizon : Deploy Helm chart] *************************\nSunday 01 March 2026  22:16:16 +0000 (0:00:00.615)       0:12:03.086 **********\nok: [instance]\n\nTASK [Create 
Ingress] **********************************************************\nSunday 01 March 2026  22:16:18 +0000 (0:00:02.225)       0:12:05.311 **********\nincluded: openstack_helm_ingress for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********\nSunday 01 March 2026  22:16:19 +0000 (0:00:00.176)       0:12:05.488 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***\nSunday 01 March 2026  22:16:19 +0000 (0:00:00.086)       0:12:05.575 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***\nSunday 01 March 2026  22:16:19 +0000 (0:00:00.086)       0:12:05.662 **********\nok: [instance]\n\nTASK [Create Ingress dashboard] ************************************************\nSunday 01 March 2026  22:16:19 +0000 (0:00:00.107)       0:12:05.770 **********\nincluded: ingress for instance\n\nTASK [vexxhost.atmosphere.ingress : Create Ingress dashboard] ******************\nSunday 01 March 2026  22:16:19 +0000 (0:00:00.266)       0:12:06.037 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************\nSunday 01 March 2026  22:16:20 +0000 (0:00:01.186)       0:12:07.224 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Fetch Neutron DB secret] ********\nSunday 01 March 2026  22:16:21 +0000 (0:00:00.850)       0:12:08.074 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Fetch Nova DB secret] ***********\nSunday 01 March 2026  22:16:22 +0000 (0:00:00.695)       0:12:08.770 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Fetch Octavia DB secret] ********\nSunday 01 March 2026  22:16:23 +0000 (0:00:00.759)       0:12:09.529 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Create \"openstack-database-exporter-dsn\" secret] ***\nSunday 01 March 2026  
22:16:23 +0000 (0:00:00.725)       0:12:10.255 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************\nSunday 01 March 2026  22:16:24 +0000 (0:00:00.749)       0:12:11.004 **********\nok: [instance]\n\nPLAY [controllers] *************************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:16:25 +0000 (0:00:00.846)       0:12:11.851 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********\nSunday 01 March 2026  22:16:29 +0000 (0:00:04.091)       0:12:15.942 **********\nok: [instance]\n\nTASK [vexxhost.containers.package : Update state for tar] **********************\nSunday 01 March 2026  22:16:29 +0000 (0:00:00.306)       0:12:16.248 **********\nok: [instance]\n\nTASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***\nSunday 01 March 2026  22:16:31 +0000 (0:00:01.405)       0:12:17.654 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Starting download of file] *******\nSunday 01 March 2026  22:16:31 +0000 (0:00:00.308)       0:12:17.963 **********\nok: [instance] => {\n    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"\n}\n\nTASK [vexxhost.containers.download_artifact : Download item] *******************\nSunday 01 March 2026  22:16:31 +0000 (0:00:00.122)       0:12:18.085 **********\nok: [instance]\n\nTASK [vexxhost.containers.download_artifact : Extract archive] *****************\nSunday 01 March 2026  22:16:32 +0000 (0:00:00.421)       0:12:18.507 **********\nok: [instance]\n\nTASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************\nSunday 01 March 2026  22:16:33 +0000 (0:00:01.133)       0:12:19.640 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] 
***\nSunday 01 March 2026  22:16:34 +0000 (0:00:00.725)       0:12:20.366 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***\nSunday 01 March 2026  22:16:34 +0000 (0:00:00.805)       0:12:21.171 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***\nSunday 01 March 2026  22:16:35 +0000 (0:00:00.909)       0:12:22.081 **********\nok: [instance]\n\nTASK [Generate OpenStack-Helm endpoints] ***************************************\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.438)       0:12:22.520 **********\nincluded: openstack_helm_endpoints for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.167)       0:12:22.688 **********\nskipping: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.083)       0:12:22.771 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.074)       0:12:22.846 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.072)       0:12:22.918 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.074)       0:12:22.992 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.068)       0:12:23.061 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 
March 2026  22:16:36 +0000 (0:00:00.070)       0:12:23.131 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.082)       0:12:23.214 **********\nok: [instance] => (item=identity)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:16:36 +0000 (0:00:00.101)       0:12:23.316 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************\nSunday 01 March 2026  22:16:37 +0000 (0:00:00.111)       0:12:23.428 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********\nSunday 01 March 2026  22:16:37 +0000 (0:00:00.525)       0:12:23.953 **********\nok: [instance]\n\nPLAY [Configure networking] ****************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:16:38 +0000 (0:00:00.550)       0:12:24.504 **********\nok: [instance]\n\nTASK [Add IP address to \"br-ex\"] ***********************************************\nSunday 01 March 2026  22:16:42 +0000 (0:00:04.145)       0:12:28.650 **********\nok: [instance]\n\nTASK [Set \"br-ex\" interface to \"up\"] *******************************************\nSunday 01 March 2026  22:16:42 +0000 (0:00:00.258)       0:12:28.908 **********\nok: [instance]\n\nPLAY RECAP *********************************************************************\ninstance                   : ok=747  changed=0    unreachable=0    failed=0    skipped=277  rescued=0    ignored=0\n\nSunday 01 March 2026  22:16:42 +0000 (0:00:00.229)       0:12:29.138 **********\n===============================================================================\nvexxhost.atmosphere.kube_prometheus_stack : Deploy additional dashboards -- 15.57s\nvexxhost.atmosphere.kube_prometheus_stack : Deploy Helm chart ---------- 
15.44s\nvexxhost.atmosphere.rook_ceph_cluster : Collect \"ceph quorum_status\" output from a monitor -- 15.06s\nvexxhost.atmosphere.ceph_provisioners : Collect \"ceph mon dump\" output from a monitor -- 14.93s\nvexxhost.atmosphere.rook_ceph_cluster : Set mgr/cephadm/warn_on_stray_daemons to false -- 14.81s\nvexxhost.ceph.osd : Get `ceph-volume lvm list` status ------------------ 10.98s\nvexxhost.ceph.mon : Validate monitor exist ----------------------------- 10.87s\nvexxhost.ceph.osd : Adopt OSDs to cluster ------------------------------ 10.09s\nvexxhost.atmosphere.kube_prometheus_stack : Install all CRDs ------------ 8.40s\nvexxhost.atmosphere.octavia : Add implied roles ------------------------- 7.62s\nvexxhost.ceph.mon : Get `cephadm ls` status ----------------------------- 5.70s\nvexxhost.ceph.osd : Get `cephadm ls` status ----------------------------- 5.57s\nvexxhost.ceph.osd : Ensure all OSDs are non-legacy ---------------------- 5.50s\nvexxhost.atmosphere.nova : Create flavors ------------------------------- 5.47s\nvexxhost.atmosphere.octavia : Create health manager security group rules --- 5.24s\nGathering Facts --------------------------------------------------------- 5.16s\nGathering Facts --------------------------------------------------------- 5.12s\nGathering Facts --------------------------------------------------------- 5.00s\nvexxhost.atmosphere.kube_prometheus_stack : Create Keycloak roles ------- 4.63s\nGathering Facts --------------------------------------------------------- 4.39s\nINFO     [aio > idempotence] Executed: Successful\nINFO     [aio > side_effect] Executing\nWARNING  [aio > side_effect] Executed: Missing playbook (Remove from test_sequence to suppress)\nINFO     [aio > verify] Executing\n\nPLAY [Run tests] ***************************************************************\n\nTASK [Run \"stestr\" tests] ******************************************************\nSunday 01 March 2026  22:16:44 +0000 (0:00:00.009)       0:00:00.009 
**********\nchanged: [localhost]\n\nPLAY [controllers[0]] **********************************************************\n\nTASK [Gathering Facts] *********************************************************\nSunday 01 March 2026  22:16:46 +0000 (0:00:02.878)       0:00:02.888 **********\n[WARNING]: Platform linux on host instance is using the discovered Python\ninterpreter at /usr/bin/python3.10, but future installation of another Python\ninterpreter could change the meaning of that path. See\nhttps://docs.ansible.com/ansible-\ncore/2.17/reference_appendices/interpreter_discovery.html for more information.\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************\nSunday 01 March 2026  22:16:52 +0000 (0:00:05.311)       0:00:08.199 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****\nSunday 01 March 2026  22:16:53 +0000 (0:00:01.109)       0:00:09.309 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********\nSunday 01 March 2026  22:16:53 +0000 (0:00:00.378)       0:00:09.687 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.652)       0:00:10.340 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] *************************************************\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.177)       0:00:10.518 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.073)       0:00:10.591 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.065)       0:00:10.657 **********\nskipping: [instance]\n\nTASK 
[vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.061)       0:00:10.718 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.064)       0:00:10.783 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.061)       0:00:10.844 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:16:54 +0000 (0:00:00.074)       0:00:10.919 **********\nok: [instance] => (item=identity)\nok: [instance] => (item=dashboard)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:16:55 +0000 (0:00:00.082)       0:00:11.002 **********\nok: [instance]\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***\nSunday 01 March 2026  22:16:55 +0000 (0:00:00.100)       0:00:11.102 **********\nincluded: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance\n\nTASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************\nSunday 01 March 2026  22:16:55 +0000 (0:00:00.161)       0:00:11.264 **********\nchanged: [instance]\n\nTASK [Generate OpenStack-Helm endpoints] ***************************************\nSunday 01 March 2026  22:16:55 +0000 (0:00:00.647)       0:00:11.911 **********\nincluded: openstack_helm_endpoints for instance\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.158)       0:00:12.070 **********\nok: [instance]\n\nTASK [Create RabbitMQ cluster] 
*************************************************\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.133)       0:00:12.203 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.069)       0:00:12.273 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.069)       0:00:12.343 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.061)       0:00:12.404 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.063)       0:00:12.467 **********\nskipping: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.071)       0:00:12.538 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.073)       0:00:12.612 **********\nok: [instance] => (item=identity)\nok: [instance] => (item=dashboard)\n\nTASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.100)       0:00:12.712 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.tempest : Get test image object] *********************\nSunday 01 March 2026  22:16:56 +0000 (0:00:00.103)       0:00:12.816 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.tempest : Configure test image ref] ******************\nSunday 01 March 2026  22:16:57 +0000 (0:00:01.127)       0:00:13.943 **********\nok: [instance] => (item={'key': 'conf', 'value': {'tempest': 
{'compute': {'image_ref': 'fe32d17b-dbe1-462c-bd98-f9a72c2d9bd0'}}}})\n\nTASK [vexxhost.atmosphere.tempest : Get test flavor object] ********************\nSunday 01 March 2026  22:16:58 +0000 (0:00:00.089)       0:00:14.032 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.tempest : Set test flavor ref] ***********************\nSunday 01 March 2026  22:16:59 +0000 (0:00:01.156)       0:00:15.188 **********\nok: [instance] => (item={'key': 'conf', 'value': {'tempest': {'compute': {'flavor_ref': '0e6c2c28-f07e-4876-9011-42d2234ae20d'}}}})\n\nTASK [vexxhost.atmosphere.tempest : Get test network object] *******************\nSunday 01 March 2026  22:16:59 +0000 (0:00:00.095)       0:00:15.284 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.tempest : Set test network ref] **********************\nSunday 01 March 2026  22:17:00 +0000 (0:00:01.141)       0:00:16.426 **********\nok: [instance] => (item={'key': 'conf', 'value': {'tempest': {'network': {'public_network_id': 'da6f04f9-28d2-4224-9cbf-d4cb1587678d'}}}})\n\nTASK [vexxhost.atmosphere.tempest : Deploy Helm chart] *************************\nSunday 01 March 2026  22:17:00 +0000 (0:00:00.082)       0:00:16.509 **********\nchanged: [instance]\n\nTASK [vexxhost.atmosphere.tempest : Get tempest job object] ********************\nSunday 01 March 2026  22:24:58 +0000 (0:07:58.208)       0:08:14.718 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.tempest : Get tempest log] ***************************\nSunday 01 March 2026  22:24:59 +0000 (0:00:01.042)       0:08:15.760 **********\nok: [instance]\n\nTASK [vexxhost.atmosphere.tempest : Print tempest log details] *****************\nSunday 01 March 2026  22:25:00 +0000 (0:00:01.000)       0:08:16.760 **********\nok: [instance] => {\n    \"msg\": [\n        \"+ tempest cleanup --init-saved-state\",\n        \"+ '[' true == false ']'\",\n        \"+ tempest run --config-file /etc/tempest/tempest.conf -w 4 --smoke\",\n        
\"/var/lib/openstack/lib/python3.12/site-packages/jsonpath_rw_ext/_string.py:19: SyntaxWarning: invalid escape sequence '\\\\('\",\n        \"  SUB = re.compile(\\\"sub\\\\(/(.*)/,\\\\s+(.*)\\\\)\\\")\",\n        \"/var/lib/openstack/lib/python3.12/site-packages/jsonpath_rw_ext/_string.py:20: SyntaxWarning: invalid escape sequence '\\\\('\",\n        \"  SPLIT = re.compile(\\\"split\\\\((.),\\\\s+(\\\\d+),\\\\s+(\\\\d+|-1)\\\\)\\\")\",\n        \"{1} tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor [0.143926s] ... ok\",\n        \"{1} tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors [0.064843s] ... ok\",\n        \"{2} tempest.api.compute.security_groups.test_security_group_rules.SecurityGroupRulesTestJSON.test_security_group_rules_create [1.264053s] ... ok\",\n        \"{2} tempest.api.compute.security_groups.test_security_group_rules.SecurityGroupRulesTestJSON.test_security_group_rules_list [0.956075s] ... ok\",\n        \"{0} tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_groups_create_list_delete [3.489915s] ... ok\",\n        \"{2} tempest.api.identity.admin.v3.test_groups.GroupsV3TestJSON.test_group_users_add_list_delete [2.785619s] ... ok\",\n        \"{1} tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers [0.149341s] ... ok\",\n        \"{1} tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details [0.000341s] ... ok\",\n        \"{2} tempest.api.identity.admin.v3.test_regions.RegionsTestJSON.test_create_region_with_specific_id [0.122054s] ... ok\",\n        \"{2} tempest.api.identity.admin.v3.test_services.ServicesTestJSON.test_create_update_get_service [0.296498s] ... ok\",\n        \"{1} tempest.api.compute.test_versions.TestVersions.test_get_version_details [0.634856s] ... ok\",\n        \"{1} tempest.api.compute.test_versions.TestVersions.test_list_api_versions [0.008604s] ... 
ok\",\n        \"{1} tempest.api.identity.admin.v3.test_credentials.CredentialsTestJSON.test_credentials_create_get_update_delete [0.161895s] ... ok\",\n        \"{0} tempest.api.compute.servers.test_create_server.ServersTestBootFromVolume.test_list_servers [0.095146s] ... ok\",\n        \"{0} tempest.api.compute.servers.test_create_server.ServersTestBootFromVolume.test_verify_server_details [0.000474s] ... ok\",\n        \"{2} tempest.api.network.test_networks.BulkNetworkOpsTest.test_bulk_create_delete_network [4.370945s] ... ok\",\n        \"{2} tempest.api.network.test_networks.BulkNetworkOpsTest.test_bulk_create_delete_port [2.981629s] ... ok\",\n        \"{1} tempest.api.identity.admin.v3.test_policies.PoliciesTestJSON.test_create_update_delete_policy [0.204571s] ... ok\",\n        \"{3} tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesUnderV243Test.test_add_remove_fixed_ip [39.107806s] ... ok\",\n        \"{2} tempest.api.network.test_networks.BulkNetworkOpsTest.test_bulk_create_delete_subnet [3.907191s] ... ok\",\n        \"{1} tempest.api.identity.admin.v3.test_trusts.TrustsV3TestJSON.test_get_trusts_all [2.526857s] ... ok\",\n        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types [0.093393s] ... ok\",\n        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources [0.031125s] ... ok\",\n        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses [0.032603s] ... ok\",\n        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_list_api_versions [0.008297s] ... ok\",\n        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_create_update_delete_network_subnet [3.907190s] ... ok\",\n        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_external_network_visibility [0.210343s] ... 
ok\",\n        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_list_networks [0.420401s] ... ok\",\n        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_list_subnets [0.064731s] ... ok\",\n        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_show_network [0.154238s] ... ok\",\n        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_show_subnet [0.087403s] ... ok\",\n        \"{1} tempest.api.identity.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists [0.112793s] ... ok\",\n        \"{0} tempest.api.compute.servers.test_server_addresses.ServerAddressesTestJSON.test_list_server_addresses [0.042528s] ... ok\",\n        \"{0} tempest.api.compute.servers.test_server_addresses.ServerAddressesTestJSON.test_list_server_addresses_by_network [0.090606s] ... ok\",\n        \"{1} tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image [0.428597s] ... ok\",\n        \"{1} tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file [0.561660s] ... ok\",\n        \"{1} tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image [0.170459s] ... ok\",\n        \"{3} tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers [0.063175s] ... ok\",\n        \"{3} tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details [0.000396s] ... ok\",\n        \"{0} tempest.api.identity.admin.v3.test_domains.DomainsTestJSON.test_create_update_delete_domain [0.584610s] ... ok\",\n        \"{1} tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_network [2.905192s] ... ok\",\n        \"{1} tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_port [3.410042s] ... ok\",\n        \"{1} tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_subnet [2.928932s] ... 
ok\",\n        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_add_multiple_router_interfaces [15.352034s] ... ok\",\n        \"{0} tempest.api.identity.admin.v3.test_roles.RolesV3TestJSON.test_role_create_update_show_list [0.209525s] ... ok\",\n        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_add_remove_router_interface_with_port_id [6.320973s] ... ok\",\n        \"{0} tempest.api.image.v2.test_versions.VersionsTest.test_list_versions [0.009240s] ... ok\",\n        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_add_remove_router_interface_with_subnet_id [5.813371s] ... ok\",\n        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocation_pools [5.204598s] ... ok\",\n        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_create_show_list_update_delete_router [4.553556s] ... ok\",\n        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_create_port_with_no_securitygroups [4.396013s] ... ok\",\n        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port [1.457659s] ... ok\",\n        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_list_ports [0.061039s] ... ok\",\n        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_show_port [0.049157s] ... ok\",\n        \"{2} tempest.api.network.test_subnetpools_extensions.SubnetPoolsTestJSON.test_create_list_show_update_delete_subnetpools [0.535435s] ... ok\",\n        \"{1} tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources [0.007773s] ... ok\",\n        \"{1} tempest.api.network.test_versions.NetworksApiDiscovery.test_show_api_v2_details [0.104734s] ... ok\",\n        \"{0} tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_floating_ip_specifying_a_fixed_ip_address [1.623475s] ... ok\",\n        \"{0} tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_list_show_update_delete_floating_ip [2.494254s] ... 
ok\",\n        \"{2} tempest.api.object_storage.test_account_services.AccountTest.test_list_account_metadata [0.042736s] ... ok\",\n        \"{2} tempest.api.object_storage.test_account_services.AccountTest.test_list_containers [0.015456s] ... ok\",\n        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_create_object [0.060405s] ... ok\",\n        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_get_object [0.036170s] ... ok\",\n        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_list_object_metadata [0.030012s] ... ok\",\n        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_update_object_metadata [0.060115s] ... ok\",\n        \"{2} tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_large_object [0.180045s] ... ok\",\n        \"{2} tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_too_many_objects [0.159305s] ... ok\",\n        \"{2} tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_valid_object [0.100581s] ... ok\",\n        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_port_in_allowed_allocation_pools [3.182550s] ... ok\",\n        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_port_with_no_securitygroups [2.809686s] ... ok\",\n        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_update_delete_port [1.077706s] ... ok\",\n        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_list_ports [0.050407s] ... ok\",\n        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_show_port [0.049320s] ... ok\",\n        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagFilterPortTestJSON.test_filter_port_tags [1.728471s] ... ok\",\n        \"{2} tempest.scenario.test_dashboard_basic_ops.TestDashboardBasicOps.test_basic_scenario [6.482377s] ... 
ok\",\n        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagFilterSecGroupTestJSON.test_filter_security_group_tags [0.451785s] ... ok\",\n        \"{3} tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard [18.942391s] ... ok\",\n        \"{0} tempest.api.network.test_routers.RoutersTest.test_add_multiple_router_interfaces [12.976368s] ... ok\",\n        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagSubnetPoolTestJSON.test_subnetpool_tags [1.297360s] ... ok\",\n        \"{0} tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_port_id [7.708847s] ... ok\",\n        \"{3} tempest.api.identity.admin.v3.test_endpoints.EndPointsTestJSON.test_update_endpoint [0.587323s] ... ok\",\n        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagSubnetTestJSON.test_subnet_tags [0.871817s] ... ok\",\n        \"{3} tempest.api.network.test_extensions.ExtensionsTestJSON.test_list_show_extensions [0.899420s] ... ok\",\n        \"{1} setUpClass (neutron_tempest_plugin.vpnaas.api.test_vpnaas.VPNaaSTestJSON) ... SKIPPED: vpnaas extension not enabled.\",\n        \"{3} tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet [2.837927s] ... ok\",\n        \"{3} tempest.api.network.test_networks.NetworksTest.test_external_network_visibility [0.205181s] ... ok\",\n        \"{3} tempest.api.network.test_networks.NetworksTest.test_list_networks [0.108839s] ... ok\",\n        \"{3} tempest.api.network.test_networks.NetworksTest.test_list_subnets [0.050421s] ... ok\",\n        \"{0} tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_subnet_id [16.624598s] ... ok\",\n        \"{3} tempest.api.network.test_networks.NetworksTest.test_show_network [0.604144s] ... ok\",\n        \"{3} tempest.api.network.test_networks.NetworksTest.test_show_subnet [0.044985s] ... 
ok\",\n        \"{0} tempest.api.network.test_routers.RoutersTest.test_create_show_list_update_delete_router [3.555721s] ... ok\",\n        \"{3} tempest.api.network.test_security_groups.SecGroupIPv6Test.test_create_list_update_show_delete_security_group [0.955497s] ... ok\",\n        \"{3} tempest.api.network.test_security_groups.SecGroupIPv6Test.test_create_show_delete_security_group_rule [1.660493s] ... ok\",\n        \"{3} tempest.api.network.test_security_groups.SecGroupIPv6Test.test_list_security_groups [0.052384s] ... ok\",\n        \"{0} tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_admin_modify_quota [0.260325s] ... ok\",\n        \"{0} tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_overlimit_upload [0.047805s] ... ok\",\n        \"{0} tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_upload_valid_object [0.058237s] ... ok\",\n        \"{3} tempest.api.network.test_security_groups.SecGroupTest.test_create_list_update_show_delete_security_group [1.595949s] ... ok\",\n        \"{3} tempest.api.network.test_security_groups.SecGroupTest.test_create_show_delete_security_group_rule [1.644817s] ... ok\",\n        \"{3} tempest.api.network.test_security_groups.SecGroupTest.test_list_security_groups [0.049140s] ... ok\",\n        \"{0} tempest.api.object_storage.test_container_services.ContainerTest.test_create_container [0.361703s] ... ok\",\n        \"{0} tempest.api.object_storage.test_container_services.ContainerTest.test_list_container_contents [0.099030s] ... ok\",\n        \"{0} tempest.api.object_storage.test_container_services.ContainerTest.test_list_container_metadata [0.057649s] ... ok\",\n        \"{0} tempest.api.volume.test_versions.VersionsTest.test_list_versions [0.011338s] ... ok\",\n        \"{3} tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_attach_detach_volume_to_instance [9.402677s] ... 
ok\",\n        \"{3} tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete [5.399387s] ... ok\",\n        \"2026-03-01 22:21:19.833 18 WARNING tempest.lib.common.ssh [-] Failed to establish authenticated ssh connection to cirros@10.96.250.218 ([Errno None] Unable to connect to port 22 on 10.96.250.218). Number attempts: 1. Retry after 2 seconds.: paramiko.ssh_exception.NoValidConnectionsError: [Errno None] Unable to connect to port 22 on 10.96.250.218\",\n        \"{3} tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete_from_image [5.562776s] ... ok\",\n        \"{3} tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list [0.063223s] ... ok\",\n        \"{0} tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops [57.493260s] ... ok\",\n        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagFilterRouterTestJSON.test_filter_router_tags [0.656806s] ... ok\",\n        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagNetworkTestJSON.test_network_tags [0.972950s] ... ok\",\n        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagQosPolicyTestJSON.test_qos_policy_tags [0.523558s] ... ok\",\n        \"{3} tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops [31.758761s] ... ok\",\n        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagSecGroupTestJSON.test_security_group_tags [0.729708s] ... ok\",\n        \"{3} setUpClass (tempest.scenario.test_server_multinode.TestServerMultinode) ... SKIPPED: Less than 2 compute nodes, skipping multinode tests.\",\n        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagTrunkTestJSON.test_trunk_tags [0.523296s] ... ok\",\n        \"{3} neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_none [2.969443s] ... 
ok\",\n        \"{3} neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_too_long_id [1.811447s] ... ok\",\n        \"{3} neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_network_only_visible_to_policy_target [3.131806s] ... ok\",\n        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagFilterFloatingIpTestJSON.test_filter_floatingip_tags [0.654962s] ... ok\",\n        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetTestJSON.test_filter_subnet_tags [0.535262s] ... ok\",\n        \"{2} octavia_tempest_plugin.tests.scenario.v2.test_traffic_ops.TrafficOperationsScenarioTest.test_basic_http_traffic [58.302840s] ... ok\",\n        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagFilterTrunkTestJSON.test_filter_trunk_tags [0.284411s] ... ok\",\n        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagPortTestJSON.test_port_tags [0.801700s] ... ok\",\n        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFilterNetworkTestJSON.test_filter_network_tags [1.011624s] ... ok\",\n        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFilterQosPolicyTestJSON.test_filter_qos_policy_tags [0.965179s] ... ok\",\n        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetpoolTestJSON.test_filter_subnetpool_tags [0.266950s] ... ok\",\n        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFloatingIpTestJSON.test_floatingip_tags [0.764660s] ... ok\",\n        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagRouterTestJSON.test_router_tags [0.993067s] ... ok\",\n        \"{2} neutron_tempest_plugin.api.admin.test_tag.UpdateTagsTest.test_update_tags_affects_only_updated_resource [2.153635s] ... 
ok\",\n        \"\",\n        \"======\",\n        \"Totals\",\n        \"======\",\n        \"Ran: 131 tests in 413.1190 sec.\",\n        \" - Passed: 129\",\n        \" - Skipped: 2\",\n        \" - Expected Fail: 0\",\n        \" - Unexpected Success: 0\",\n        \" - Failed: 0\",\n        \"Sum of execute time for each test: 406.1944 sec.\",\n        \"\",\n        \"==============\",\n        \"Worker Balance\",\n        \"==============\",\n        \" - Worker 0 (32 tests) => 0:04:33.129895\",\n        \" - Worker 1 (36 tests) => 0:02:37.971900\",\n        \" - Worker 2 (32 tests) => 0:06:45.470897\",\n        \" - Worker 3 (31 tests) => 0:05:39.364864\",\n        \"+ tempest cleanup\",\n        \"\"\n    ]\n}\n\nTASK [vexxhost.atmosphere.tempest : Fail when tempest result is failed] ********\nSunday 01 March 2026  22:25:00 +0000 (0:00:00.091)       0:08:16.851 **********\nskipping: [instance]\n\nPLAY RECAP *********************************************************************\ninstance                   : ok=25   changed=2    unreachable=0    failed=0    skipped=11   rescued=0    ignored=0\nlocalhost                  : ok=1    changed=1    unreachable=0    failed=0    skipped=0    rescued=0    ignored=0\n\nSunday 01 March 2026  22:25:00 +0000 (0:00:00.063)       0:08:16.915 **********\n===============================================================================\nvexxhost.atmosphere.tempest : Deploy Helm chart ----------------------- 478.21s\nGathering Facts --------------------------------------------------------- 5.31s\nRun \"stestr\" tests ------------------------------------------------------ 2.88s\nvexxhost.atmosphere.tempest : Get test flavor object -------------------- 1.16s\nvexxhost.atmosphere.tempest : Get test network object ------------------- 1.14s\nvexxhost.atmosphere.tempest : Get test image object --------------------- 1.13s\nvexxhost.atmosphere.openstacksdk : Install openstacksdk ----------------- 1.11s\nvexxhost.atmosphere.tempest : Get 
tempest job object -------------------- 1.04s\nvexxhost.atmosphere.tempest : Get tempest log --------------------------- 1.00s\nvexxhost.atmosphere.openstacksdk : Generate cloud config file ----------- 0.65s\nvexxhost.kubernetes.upload_helm_chart : Upload Helm chart --------------- 0.65s\nvexxhost.atmosphere.openstacksdk : Create openstack config directory ---- 0.38s\nvexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints --- 0.18s\nvexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks --- 0.16s\nGenerate OpenStack-Helm endpoints --------------------------------------- 0.16s\nvexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints --- 0.13s\nvexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts ----------- 0.10s\nvexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints --- 0.10s\nvexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts ----------- 0.10s\nvexxhost.atmosphere.tempest : Set test flavor ref ----------------------- 0.10s\nINFO     [aio > verify] Executed: Successful\nINFO     [aio > cleanup] Executing\nWARNING  [aio > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)\nINFO     [aio > destroy] Executing\nWARNING  [aio > destroy] Skipping, '--destroy=never' requested.\nINFO     [aio > destroy] Executed: Successful\nWARNING  Molecule executed 1 scenario (1 missing files)",
                            "stdout_lines": [
                                "Using CPython 3.10.12 interpreter at: /usr/bin/python3",
                                "Creating virtual environment at: .venv",
                                "   Building atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere",
                                "Downloading pygments (1.2MiB)",
                                "Downloading setuptools (1.1MiB)",
                                "Downloading rjsonnet (1.2MiB)",
                                "Downloading ansible-core (2.1MiB)",
                                "Downloading netaddr (2.2MiB)",
                                "Downloading cryptography (4.2MiB)",
                                "Downloading openstacksdk (1.7MiB)",
                                "Downloading kubernetes (1.9MiB)",
                                "   Building pyperclip==1.9.0",
                                " Downloading rjsonnet",
                                " Downloading pygments",
                                " Downloading netaddr",
                                " Downloading cryptography",
                                " Downloading setuptools",
                                " Downloading kubernetes",
                                " Downloading ansible-core",
                                " Downloading openstacksdk",
                                "      Built pyperclip==1.9.0",
                                "      Built atmosphere @ file:///home/zuul/src/github.com/vexxhost/atmosphere",
                                "Installed 79 packages in 37ms",
                                "WARNING  Molecule scenarios should migrate to 'extensions/molecule'",
                                "INFO     [aio > discovery] scenario test matrix: dependency, cleanup, destroy, syntax, create, prepare, converge, idempotence, side_effect, verify, cleanup, destroy",
                                "INFO     [aio > prerun] Performing prerun with role_name_check=0...",
                                "INFO     [aio > dependency] Executing",
                                "WARNING  [aio > dependency] Missing roles requirements file: requirements.yml",
                                "WARNING  [aio > dependency] Missing collections requirements file: collections.yml",
                                "WARNING  [aio > dependency] Executed: 2 missing (Remove from test_sequence to suppress)",
                                "INFO     [aio > cleanup] Executing",
                                "WARNING  [aio > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [aio > destroy] Executing",
                                "WARNING  [aio > destroy] Skipping, '--destroy=never' requested.",
                                "INFO     [aio > destroy] Executed: Successful",
                                "INFO     [aio > syntax] Executing",
                                "",
                                "playbook: /home/zuul/src/github.com/vexxhost/atmosphere/molecule/aio/converge.yml",
                                "INFO     [aio > syntax] Executed: Successful",
                                "INFO     [aio > create] Executing",
                                "WARNING  [aio > create] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [aio > prepare] Executing",
                                "",
                                "PLAY [Prepare] *****************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:04:21 +0000 (0:00:00.026)       0:00:00.026 **********",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [Configure short hostname] ************************************************",
                                "Sunday 01 March 2026  21:04:22 +0000 (0:00:01.164)       0:00:01.191 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Ensure hostname inside hosts file] ***************************************",
                                "Sunday 01 March 2026  21:04:23 +0000 (0:00:00.680)       0:00:01.871 **********",
                                "[WARNING]: Module remote_tmp /root/.ansible/tmp did not exist and was created",
                                "with a mode of 0700, this may cause issues when running as another user. To",
                                "avoid this, create the remote_tmp dir with the correct permissions manually",
                                "changed: [instance]",
                                "",
                                "TASK [Install \"dirmngr\" for GPG keyserver operations] **************************",
                                "Sunday 01 March 2026  21:04:23 +0000 (0:00:00.264)       0:00:02.136 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Purge \"snapd\" package] ***************************************************",
                                "Sunday 01 March 2026  21:04:24 +0000 (0:00:01.123)       0:00:03.259 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Generate workspace for Atmosphere] ***************************************",
                                "",
                                "TASK [Create folders for workspace] ********************************************",
                                "Sunday 01 March 2026  21:04:25 +0000 (0:00:00.686)       0:00:03.946 **********",
                                "changed: [localhost] => (item=group_vars)",
                                "changed: [localhost] => (item=group_vars/all)",
                                "changed: [localhost] => (item=group_vars/controllers)",
                                "changed: [localhost] => (item=group_vars/cephs)",
                                "changed: [localhost] => (item=group_vars/computes)",
                                "changed: [localhost] => (item=host_vars)",
                                "",
                                "PLAY [Generate Ceph control plane configuration for workspace] *****************",
                                "",
                                "TASK [Ensure the Ceph control plane configuration file exists] *****************",
                                "Sunday 01 March 2026  21:04:26 +0000 (0:00:01.010)       0:00:04.956 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current Ceph control plane configuration into a variable] *******",
                                "Sunday 01 March 2026  21:04:26 +0000 (0:00:00.182)       0:00:05.139 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate Ceph control plane values for missing variables] ****************",
                                "Sunday 01 March 2026  21:04:26 +0000 (0:00:00.024)       0:00:05.163 **********",
                                "ok: [localhost] => (item={'key': 'ceph_fsid', 'value': '7aabea57-4b0a-554c-aac8-9f3c15dba566'})",
                                "ok: [localhost] => (item={'key': 'ceph_mon_public_network', 'value': '10.96.240.0/24'})",
                                "",
                                "TASK [Write new Ceph control plane configuration file to disk] *****************",
                                "Sunday 01 March 2026  21:04:26 +0000 (0:00:00.046)       0:00:05.210 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [Generate Ceph OSD configuration for workspace] ***************************",
                                "",
                                "TASK [Ensure the Ceph OSDs configuration file exists] **************************",
                                "Sunday 01 March 2026  21:04:27 +0000 (0:00:00.521)       0:00:05.732 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current Ceph OSDs configuration into a variable] ****************",
                                "Sunday 01 March 2026  21:04:27 +0000 (0:00:00.165)       0:00:05.897 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate Ceph OSDs values for missing variables] *************************",
                                "Sunday 01 March 2026  21:04:27 +0000 (0:00:00.028)       0:00:05.925 **********",
                                "ok: [localhost] => (item={'key': 'ceph_osd_devices', 'value': ['/dev/vdb', '/dev/vdc', '/dev/vdd']})",
                                "",
                                "TASK [Write new Ceph OSDs configuration file to disk] **************************",
                                "Sunday 01 March 2026  21:04:27 +0000 (0:00:00.031)       0:00:05.957 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [Generate Kubernetes configuration for workspace] *************************",
                                "",
                                "TASK [Ensure the Kubernetes configuration file exists] *************************",
                                "Sunday 01 March 2026  21:04:27 +0000 (0:00:00.350)       0:00:06.307 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current Kubernetes configuration into a variable] ***************",
                                "Sunday 01 March 2026  21:04:27 +0000 (0:00:00.184)       0:00:06.491 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate Kubernetes values for missing variables] ************************",
                                "Sunday 01 March 2026  21:04:27 +0000 (0:00:00.025)       0:00:06.517 **********",
                                "ok: [localhost] => (item={'key': 'kubernetes_hostname', 'value': '10.96.240.10'})",
                                "ok: [localhost] => (item={'key': 'kubernetes_keepalived_vrid', 'value': 42})",
                                "ok: [localhost] => (item={'key': 'kubernetes_keepalived_vip', 'value': '10.96.240.10'})",
                                "",
                                "TASK [Write new Kubernetes configuration file to disk] *************************",
                                "Sunday 01 March 2026  21:04:28 +0000 (0:00:00.039)       0:00:06.557 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [Generate Keepalived configuration for workspace] *************************",
                                "",
                                "TASK [Ensure the Keeaplived configuration file exists] *************************",
                                "Sunday 01 March 2026  21:04:28 +0000 (0:00:00.363)       0:00:06.920 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current Keepalived configuration into a variable] ***************",
                                "Sunday 01 March 2026  21:04:28 +0000 (0:00:00.167)       0:00:07.087 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate Keepalived values for missing variables] ************************",
                                "Sunday 01 March 2026  21:04:28 +0000 (0:00:00.025)       0:00:07.113 **********",
                                "ok: [localhost] => (item={'key': 'keepalived_interface', 'value': 'br-ex'})",
                                "ok: [localhost] => (item={'key': 'keepalived_vip', 'value': '10.96.250.10'})",
                                "",
                                "TASK [Write new Keepalived configuration file to disk] *************************",
                                "Sunday 01 March 2026  21:04:28 +0000 (0:00:00.034)       0:00:07.147 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [Generate endpoints for workspace] ****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:04:28 +0000 (0:00:00.353)       0:00:07.501 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Ensure the endpoints file exists] ****************************************",
                                "Sunday 01 March 2026  21:04:29 +0000 (0:00:00.680)       0:00:08.182 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current endpoints into a variable] ******************************",
                                "Sunday 01 March 2026  21:04:29 +0000 (0:00:00.169)       0:00:08.352 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate endpoint skeleton for missing variables] ************************",
                                "Sunday 01 March 2026  21:04:29 +0000 (0:00:00.027)       0:00:08.379 **********",
                                "ok: [localhost] => (item=keycloak_host)",
                                "ok: [localhost] => (item=kube_prometheus_stack_grafana_host)",
                                "ok: [localhost] => (item=kube_prometheus_stack_alertmanager_host)",
                                "ok: [localhost] => (item=kube_prometheus_stack_prometheus_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_region_name)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_keystone_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_glance_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_cinder_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_placement_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_barbican_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_neutron_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_nova_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_nova_novnc_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_ironic_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_designate_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_octavia_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_magnum_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_magnum_registry_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_heat_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_heat_cfn_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_horizon_api_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_rgw_host)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_manila_api_host)",
                                "",
                                "TASK [Write new endpoints file to disk] ****************************************",
                                "Sunday 01 March 2026  21:04:30 +0000 (0:00:00.786)       0:00:09.165 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Ensure the endpoints file exists] ****************************************",
                                "Sunday 01 March 2026  21:04:30 +0000 (0:00:00.349)       0:00:09.515 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [Generate Neutron configuration for workspace] ****************************",
                                "",
                                "TASK [Ensure the Neutron configuration file exists] ****************************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.170)       0:00:09.685 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current Neutron configuration into a variable] ******************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.185)       0:00:09.870 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate Neutron values for missing variables] ***************************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.028)       0:00:09.899 **********",
                                "ok: [localhost] => (item={'key': 'neutron_networks', 'value': [{'name': 'public', 'external': True, 'shared': True, 'mtu_size': 1500, 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'subnets': [{'name': 'public-subnet', 'cidr': '10.96.250.0/24', 'gateway_ip': '10.96.250.10', 'allocation_pool_start': '10.96.250.200', 'allocation_pool_end': '10.96.250.220', 'enable_dhcp': True}]}]})",
                                "",
                                "TASK [Write new Neutron configuration file to disk] ****************************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.039)       0:00:09.938 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [Generate Nova configuration for workspace] *******************************",
                                "",
                                "TASK [Ensure the Nova configuration file exists] *******************************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.339)       0:00:10.277 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current Nova configuration into a variable] *********************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.170)       0:00:10.447 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate Nova values for missing variables] ******************************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.033)       0:00:10.480 **********",
                                "ok: [localhost] => (item={'key': 'nova_flavors', 'value': [{'name': 'm1.tiny', 'ram': 512, 'disk': 1, 'vcpus': 1}, {'name': 'm1.small', 'ram': 2048, 'disk': 20, 'vcpus': 1}, {'name': 'm1.medium', 'ram': 4096, 'disk': 40, 'vcpus': 2}, {'name': 'm1.large', 'ram': 8192, 'disk': 80, 'vcpus': 4}, {'name': 'm1.xlarge', 'ram': 16384, 'disk': 160, 'vcpus': 8}]})",
                                "",
                                "TASK [Write new Nova configuration file to disk] *******************************",
                                "Sunday 01 March 2026  21:04:31 +0000 (0:00:00.041)       0:00:10.522 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [Generate secrets for workspace] ******************************************",
                                "",
                                "TASK [Ensure the secrets file exists] ******************************************",
                                "Sunday 01 March 2026  21:04:32 +0000 (0:00:00.352)       0:00:10.875 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Load the current secrets into a variable] ********************************",
                                "Sunday 01 March 2026  21:04:32 +0000 (0:00:00.172)       0:00:11.047 **********",
                                "ok: [localhost]",
                                "",
                                "TASK [Generate secrets for missing variables] **********************************",
                                "Sunday 01 March 2026  21:04:32 +0000 (0:00:00.031)       0:00:11.079 **********",
                                "ok: [localhost] => (item=heat_auth_encryption_key)",
                                "ok: [localhost] => (item=keepalived_password)",
                                "ok: [localhost] => (item=keycloak_admin_password)",
                                "ok: [localhost] => (item=keycloak_database_password)",
                                "ok: [localhost] => (item=keystone_keycloak_client_secret)",
                                "ok: [localhost] => (item=keystone_oidc_crypto_passphrase)",
                                "ok: [localhost] => (item=kube_prometheus_stack_grafana_admin_password)",
                                "ok: [localhost] => (item=octavia_heartbeat_key)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_rabbitmq_admin_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_memcached_secret_key)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_keystone_admin_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_keystone_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_keystone_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_glance_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_glance_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_glance_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_cinder_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_cinder_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_cinder_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_placement_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_placement_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_barbican_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_barbican_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_neutron_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_neutron_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_neutron_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_neutron_metadata_secret)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_nova_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_nova_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_nova_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_ironic_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_ironic_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_ironic_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_designate_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_designate_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_designate_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_octavia_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_octavia_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_octavia_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_magnum_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_magnum_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_magnum_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_heat_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_heat_trustee_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_heat_stack_user_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_heat_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_heat_rabbitmq_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_horizon_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_tempest_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_openstack_exporter_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_rgw_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_manila_keystone_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_manila_mariadb_password)",
                                "ok: [localhost] => (item=openstack_helm_endpoints_staffeln_mariadb_password)",
                                "",
                                "TASK [Generate base64 encoded secrets] *****************************************",
                                "Sunday 01 March 2026  21:04:32 +0000 (0:00:00.401)       0:00:11.480 **********",
                                "ok: [localhost] => (item=barbican_kek)",
                                "",
                                "TASK [Generate temporary files for generating keys for missing variables] ******",
                                "Sunday 01 March 2026  21:04:32 +0000 (0:00:00.057)       0:00:11.537 **********",
                                "changed: [localhost] => (item=manila_ssh_key)",
                                "changed: [localhost] => (item=nova_ssh_key)",
                                "",
                                "TASK [Generate SSH keys for missing variables] *********************************",
                                "Sunday 01 March 2026  21:04:33 +0000 (0:00:00.387)       0:00:11.924 **********",
                                "changed: [localhost] => (item=manila_ssh_key)",
                                "changed: [localhost] => (item=nova_ssh_key)",
                                "",
                                "TASK [Set values for SSH keys] *************************************************",
                                "Sunday 01 March 2026  21:04:36 +0000 (0:00:03.221)       0:00:15.146 **********",
                                "ok: [localhost] => (item=manila_ssh_key)",
                                "ok: [localhost] => (item=nova_ssh_key)",
                                "",
                                "TASK [Delete the temporary files generated for SSH keys] ***********************",
                                "Sunday 01 March 2026  21:04:36 +0000 (0:00:00.052)       0:00:15.199 **********",
                                "changed: [localhost] => (item=manila_ssh_key)",
                                "changed: [localhost] => (item=nova_ssh_key)",
                                "",
                                "TASK [Write new secrets file to disk] ******************************************",
                                "Sunday 01 March 2026  21:04:37 +0000 (0:00:00.352)       0:00:15.552 **********",
                                "changed: [localhost]",
                                "",
                                "TASK [Encrypt secrets file with Vault password] ********************************",
                                "Sunday 01 March 2026  21:04:37 +0000 (0:00:00.346)       0:00:15.898 **********",
                                "skipping: [localhost]",
                                "",
                                "PLAY [Setup networking] ********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:04:37 +0000 (0:00:00.037)       0:00:15.936 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create bridge for management network] ************************************",
                                "Sunday 01 March 2026  21:04:38 +0000 (0:00:00.693)       0:00:16.629 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create fake interface for management bridge] *****************************",
                                "Sunday 01 March 2026  21:04:38 +0000 (0:00:00.314)       0:00:16.943 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Assign dummy interface to management bridge] *****************************",
                                "Sunday 01 March 2026  21:04:38 +0000 (0:00:00.203)       0:00:17.147 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Assign IP address for management bridge] *********************************",
                                "Sunday 01 March 2026  21:04:38 +0000 (0:00:00.196)       0:00:17.343 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Bring up interfaces] *****************************************************",
                                "Sunday 01 March 2026  21:04:39 +0000 (0:00:00.210)       0:00:17.554 **********",
                                "ok: [instance] => (item=br-mgmt)",
                                "ok: [instance] => (item=dummy0)",
                                "",
                                "PLAY [Create devices for Ceph] *************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:04:39 +0000 (0:00:00.382)       0:00:17.937 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install depedencies] *****************************************************",
                                "Sunday 01 March 2026  21:04:40 +0000 (0:00:00.745)       0:00:18.682 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Start up service] ********************************************************",
                                "Sunday 01 March 2026  21:04:59 +0000 (0:00:19.624)       0:00:38.307 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Generate lvm.conf] *******************************************************",
                                "Sunday 01 March 2026  21:05:00 +0000 (0:00:00.518)       0:00:38.826 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Write /etc/lvm/lvm.conf] *************************************************",
                                "Sunday 01 March 2026  21:05:00 +0000 (0:00:00.220)       0:00:39.046 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Get list of all loopback devices] ****************************************",
                                "Sunday 01 March 2026  21:05:00 +0000 (0:00:00.446)       0:00:39.493 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Fail if there is any existing loopback devices] **************************",
                                "Sunday 01 March 2026  21:05:01 +0000 (0:00:00.198)       0:00:39.691 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create devices for Ceph] *************************************************",
                                "Sunday 01 March 2026  21:05:01 +0000 (0:00:00.030)       0:00:39.722 **********",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Set permissions on loopback devices] *************************************",
                                "Sunday 01 March 2026  21:05:01 +0000 (0:00:00.509)       0:00:40.232 **********",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Start loop devices] ******************************************************",
                                "Sunday 01 March 2026  21:05:02 +0000 (0:00:00.521)       0:00:40.754 **********",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Create a volume group for each loop device] ******************************",
                                "Sunday 01 March 2026  21:05:02 +0000 (0:00:00.735)       0:00:41.489 **********",
                                "changed: [instance] => (item=osd0)",
                                "changed: [instance] => (item=osd1)",
                                "changed: [instance] => (item=osd2)",
                                "",
                                "TASK [Create a logical volume for each loop device] ****************************",
                                "Sunday 01 March 2026  21:05:06 +0000 (0:00:03.188)       0:00:44.678 **********",
                                "changed: [instance] => (item=ceph-instance-osd0)",
                                "changed: [instance] => (item=ceph-instance-osd1)",
                                "changed: [instance] => (item=ceph-instance-osd2)",
                                "",
                                "PLAY [controllers] *************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:05:08 +0000 (0:00:01.924)       0:00:46.602 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Set masquerade rule] *****************************************************",
                                "Sunday 01 March 2026  21:05:08 +0000 (0:00:00.922)       0:00:47.524 **********",
                                "changed: [instance]",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=24   changed=10   unreachable=0    failed=0    skipped=1    rescued=0    ignored=0",
                                "localhost                  : ok=40   changed=21   unreachable=0    failed=0    skipped=1    rescued=0    ignored=0",
                                "",
                                "Sunday 01 March 2026  21:05:09 +0000 (0:00:00.459)       0:00:47.984 **********",
                                "===============================================================================",
                                "Install depedencies ---------------------------------------------------- 19.62s",
                                "Generate SSH keys for missing variables --------------------------------- 3.22s",
                                "Create a volume group for each loop device ------------------------------ 3.19s",
                                "Create a logical volume for each loop device ---------------------------- 1.92s",
                                "Gathering Facts --------------------------------------------------------- 1.16s",
                                "Install \"dirmngr\" for GPG keyserver operations -------------------------- 1.12s",
                                "Create folders for workspace -------------------------------------------- 1.01s",
                                "Gathering Facts --------------------------------------------------------- 0.92s",
                                "Generate endpoint skeleton for missing variables ------------------------ 0.79s",
                                "Gathering Facts --------------------------------------------------------- 0.75s",
                                "Start loop devices ------------------------------------------------------ 0.74s",
                                "Gathering Facts --------------------------------------------------------- 0.69s",
                                "Purge \"snapd\" package --------------------------------------------------- 0.69s",
                                "Gathering Facts --------------------------------------------------------- 0.68s",
                                "Configure short hostname ------------------------------------------------ 0.68s",
                                "Set permissions on loopback devices ------------------------------------- 0.52s",
                                "Write new Ceph control plane configuration file to disk ----------------- 0.52s",
                                "Start up service -------------------------------------------------------- 0.52s",
                                "Create devices for Ceph ------------------------------------------------- 0.51s",
                                "Set masquerade rule ----------------------------------------------------- 0.46s",
                                "INFO     [aio > prepare] Executed: Successful",
                                "INFO     [aio > converge] Executing",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:05:12 +0000 (0:00:00.017)       0:00:00.017 **********",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Sunday 01 March 2026  21:05:13 +0000 (0:00:01.284)       0:00:01.302 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph monitors & managers] *****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:05:13 +0000 (0:00:00.304)       0:00:01.606 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:05:14 +0000 (0:00:00.879)       0:00:02.486 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:05:15 +0000 (0:00:00.282)       0:00:02.768 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  21:05:15 +0000 (0:00:00.043)       0:00:02.811 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:05:15 +0000 (0:00:00.284)       0:00:03.096 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:05:15 +0000 (0:00:00.069)       0:00:03.166 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:05:16 +0000 (0:00:00.635)       0:00:03.801 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:05:16 +0000 (0:00:00.043)       0:00:03.845 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:05:16 +0000 (0:00:00.043)       0:00:03.888 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:05:16 +0000 (0:00:00.199)       0:00:04.087 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:05:17 +0000 (0:00:01.265)       0:00:05.353 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:05:17 +0000 (0:00:00.067)       0:00:05.421 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:05:18 +0000 (0:00:00.689)       0:00:06.110 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  21:05:21 +0000 (0:00:02.910)       0:00:09.021 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  21:05:21 +0000 (0:00:00.031)       0:00:09.052 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  21:05:21 +0000 (0:00:00.032)       0:00:09.084 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  21:05:21 +0000 (0:00:00.025)       0:00:09.109 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  21:05:26 +0000 (0:00:05.317)       0:00:14.427 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  21:05:27 +0000 (0:00:00.541)       0:00:14.969 **********",
                                "changed: [instance] => (item={'path': '/etc/containerd'})",
                                "changed: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "changed: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "changed: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "changed: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  21:05:28 +0000 (0:00:00.894)       0:00:15.863 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  21:05:28 +0000 (0:00:00.526)       0:00:16.389 **********",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************",
                                "Sunday 01 March 2026  21:05:28 +0000 (0:00:00.008)       0:00:16.398 **********",
                                "ok: [instance]",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.containerd : Restart containerd] **********",
                                "Sunday 01 March 2026  21:05:29 +0000 (0:00:00.882)       0:00:17.281 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  21:05:30 +0000 (0:00:00.474)       0:00:17.755 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:05:30 +0000 (0:00:00.541)       0:00:18.297 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:05:30 +0000 (0:00:00.212)       0:00:18.509 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:05:30 +0000 (0:00:00.066)       0:00:18.576 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:05:31 +0000 (0:00:00.852)       0:00:19.429 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Sunday 01 March 2026  21:05:35 +0000 (0:00:04.278)       0:00:23.707 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Sunday 01 March 2026  21:05:36 +0000 (0:00:00.936)       0:00:24.644 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Sunday 01 March 2026  21:05:37 +0000 (0:00:00.313)       0:00:24.957 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Sunday 01 March 2026  21:05:37 +0000 (0:00:00.443)       0:00:25.401 **********",
                                "changed: [instance] => (item={'path': '/etc/docker'})",
                                "changed: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "changed: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Sunday 01 March 2026  21:05:38 +0000 (0:00:00.568)       0:00:25.969 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Sunday 01 March 2026  21:05:38 +0000 (0:00:00.446)       0:00:26.416 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Sunday 01 March 2026  21:05:39 +0000 (0:00:00.428)       0:00:26.845 **********",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.containerd : Reload systemd] **************",
                                "Sunday 01 March 2026  21:05:39 +0000 (0:00:00.009)       0:00:26.855 **********",
                                "ok: [instance]",
                                "",
                                "RUNNING HANDLER [vexxhost.containers.docker : Restart docker] ******************",
                                "Sunday 01 March 2026  21:05:39 +0000 (0:00:00.722)       0:00:27.577 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Sunday 01 March 2026  21:05:40 +0000 (0:00:00.821)       0:00:28.399 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Sunday 01 March 2026  21:05:41 +0000 (0:00:00.563)       0:00:28.962 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Sunday 01 March 2026  21:05:41 +0000 (0:00:00.060)       0:00:29.022 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Sunday 01 March 2026  21:05:45 +0000 (0:00:04.692)       0:00:33.715 **********",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Sunday 01 March 2026  21:05:46 +0000 (0:00:00.640)       0:00:34.355 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Sunday 01 March 2026  21:05:46 +0000 (0:00:00.301)       0:00:34.656 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Sunday 01 March 2026  21:05:47 +0000 (0:00:00.204)       0:00:34.861 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Sunday 01 March 2026  21:05:47 +0000 (0:00:00.471)       0:00:35.332 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Get `cephadm ls` status] *****************************",
                                "Sunday 01 March 2026  21:05:47 +0000 (0:00:00.370)       0:00:35.703 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:01.673)       0:00:37.377 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:00.061)       0:00:37.439 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:00.048)       0:00:37.487 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:00.042)       0:00:37.529 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:00.041)       0:00:37.571 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:00.044)       0:00:37.615 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:00.040)       0:00:37.655 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************",
                                "Sunday 01 March 2026  21:05:49 +0000 (0:00:00.040)       0:00:37.696 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************",
                                "Sunday 01 March 2026  21:05:50 +0000 (0:00:00.043)       0:00:37.739 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************",
                                "Sunday 01 March 2026  21:05:50 +0000 (0:00:00.119)       0:00:37.858 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********",
                                "Sunday 01 March 2026  21:05:50 +0000 (0:00:00.209)       0:00:38.068 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************",
                                "Sunday 01 March 2026  21:05:50 +0000 (0:00:00.052)       0:00:38.121 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/mon/tasks/bootstrap-ceph.yml for instance",
                                "",
                                "TASK [vexxhost.ceph.mon : Generate temporary file for \"ceph.conf\"] *************",
                                "Sunday 01 March 2026  21:05:50 +0000 (0:00:00.073)       0:00:38.194 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Include extra configuration values] ******************",
                                "Sunday 01 March 2026  21:05:50 +0000 (0:00:00.343)       0:00:38.538 **********",
                                "changed: [instance] => (item={'option': 'mon allow pool size one', 'section': 'global', 'value': True})",
                                "changed: [instance] => (item={'option': 'osd crush chooseleaf type', 'section': 'global', 'value': 0})",
                                "changed: [instance] => (item={'option': 'auth allow insecure global id reclaim', 'section': 'mon', 'value': False})",
                                "",
                                "TASK [vexxhost.ceph.mon : Run Bootstrap coomand] *******************************",
                                "Sunday 01 March 2026  21:05:51 +0000 (0:00:00.697)       0:00:39.235 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Remove temporary file for \"ceph.conf\"] ***************",
                                "Sunday 01 March 2026  21:07:55 +0000 (0:02:03.512)       0:02:42.748 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set bootstrap node] **********************************",
                                "Sunday 01 March 2026  21:07:55 +0000 (0:00:00.203)       0:02:42.951 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Sunday 01 March 2026  21:07:55 +0000 (0:00:00.043)       0:02:42.995 **********",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Sunday 01 March 2026  21:07:55 +0000 (0:00:00.081)       0:02:43.076 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Sunday 01 March 2026  21:07:56 +0000 (0:00:01.584)       0:02:44.661 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Sunday 01 March 2026  21:07:56 +0000 (0:00:00.063)       0:02:44.724 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Sunday 01 March 2026  21:07:57 +0000 (0:00:00.411)       0:02:45.135 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************",
                                "Sunday 01 March 2026  21:07:59 +0000 (0:00:01.901)       0:02:47.037 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Validate monitor exist] ******************************",
                                "Sunday 01 March 2026  21:08:00 +0000 (0:00:01.618)       0:02:48.656 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Sunday 01 March 2026  21:08:11 +0000 (0:00:10.382)       0:02:59.038 **********",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Sunday 01 March 2026  21:08:11 +0000 (0:00:00.076)       0:02:59.115 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Sunday 01 March 2026  21:08:11 +0000 (0:00:00.053)       0:02:59.169 **********",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Sunday 01 March 2026  21:08:11 +0000 (0:00:00.049)       0:02:59.218 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Sunday 01 March 2026  21:08:11 +0000 (0:00:00.259)       0:02:59.477 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Configure \"mgr\" label for managers] ******************",
                                "Sunday 01 March 2026  21:08:13 +0000 (0:00:01.978)       0:03:01.456 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Validate manager exist] ******************************",
                                "Sunday 01 March 2026  21:08:15 +0000 (0:00:01.652)       0:03:03.108 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********",
                                "Sunday 01 March 2026  21:08:16 +0000 (0:00:01.592)       0:03:04.700 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph OSDs] ********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:08:19 +0000 (0:00:02.534)       0:03:07.235 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:08:20 +0000 (0:00:00.938)       0:03:08.174 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:08:20 +0000 (0:00:00.211)       0:03:08.385 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  21:08:20 +0000 (0:00:00.047)       0:03:08.433 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:08:20 +0000 (0:00:00.222)       0:03:08.656 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:08:20 +0000 (0:00:00.061)       0:03:08.718 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:08:21 +0000 (0:00:00.308)       0:03:09.026 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:08:21 +0000 (0:00:00.056)       0:03:09.083 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:08:21 +0000 (0:00:00.051)       0:03:09.134 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:08:21 +0000 (0:00:00.210)       0:03:09.345 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:08:22 +0000 (0:00:01.278)       0:03:10.623 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:08:22 +0000 (0:00:00.069)       0:03:10.693 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:08:23 +0000 (0:00:00.333)       0:03:11.026 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  21:08:25 +0000 (0:00:01.890)       0:03:12.916 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  21:08:25 +0000 (0:00:00.034)       0:03:12.951 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  21:08:25 +0000 (0:00:00.036)       0:03:12.988 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  21:08:25 +0000 (0:00:00.034)       0:03:13.022 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  21:08:26 +0000 (0:00:01.168)       0:03:14.190 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  21:08:26 +0000 (0:00:00.427)       0:03:14.617 **********",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  21:08:27 +0000 (0:00:00.882)       0:03:15.500 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  21:08:28 +0000 (0:00:00.481)       0:03:15.981 **********",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  21:08:28 +0000 (0:00:00.006)       0:03:15.988 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:08:28 +0000 (0:00:00.342)       0:03:16.330 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:08:28 +0000 (0:00:00.195)       0:03:16.526 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:08:28 +0000 (0:00:00.052)       0:03:16.578 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:08:29 +0000 (0:00:00.336)       0:03:16.915 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Sunday 01 March 2026  21:08:32 +0000 (0:00:03.030)       0:03:19.945 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Sunday 01 March 2026  21:08:33 +0000 (0:00:01.162)       0:03:21.107 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Sunday 01 March 2026  21:08:33 +0000 (0:00:00.194)       0:03:21.302 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Sunday 01 March 2026  21:08:33 +0000 (0:00:00.392)       0:03:21.694 **********",
                                "ok: [instance] => (item={'path': '/etc/docker'})",
                                "ok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "ok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Sunday 01 March 2026  21:08:34 +0000 (0:00:00.521)       0:03:22.215 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Sunday 01 March 2026  21:08:34 +0000 (0:00:00.390)       0:03:22.606 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Sunday 01 March 2026  21:08:35 +0000 (0:00:00.415)       0:03:23.022 **********",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Sunday 01 March 2026  21:08:35 +0000 (0:00:00.006)       0:03:23.028 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Sunday 01 March 2026  21:08:35 +0000 (0:00:00.344)       0:03:23.373 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Sunday 01 March 2026  21:08:35 +0000 (0:00:00.059)       0:03:23.432 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Sunday 01 March 2026  21:08:36 +0000 (0:00:01.216)       0:03:24.649 **********",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Sunday 01 March 2026  21:08:37 +0000 (0:00:00.662)       0:03:25.312 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Sunday 01 March 2026  21:08:37 +0000 (0:00:00.299)       0:03:25.611 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Sunday 01 March 2026  21:08:38 +0000 (0:00:00.202)       0:03:25.813 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Sunday 01 March 2026  21:08:38 +0000 (0:00:00.235)       0:03:26.049 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get monitor status] **********************************",
                                "Sunday 01 March 2026  21:08:38 +0000 (0:00:00.208)       0:03:26.258 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.osd : Select admin host] ***********************************",
                                "Sunday 01 March 2026  21:08:38 +0000 (0:00:00.217)       0:03:26.476 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************",
                                "Sunday 01 March 2026  21:08:38 +0000 (0:00:00.048)       0:03:26.524 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************",
                                "Sunday 01 March 2026  21:08:43 +0000 (0:00:04.273)       0:03:30.797 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Sunday 01 March 2026  21:08:43 +0000 (0:00:00.049)       0:03:30.847 **********",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Sunday 01 March 2026  21:08:43 +0000 (0:00:00.070)       0:03:30.917 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Sunday 01 March 2026  21:08:43 +0000 (0:00:00.049)       0:03:30.966 **********",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Sunday 01 March 2026  21:08:43 +0000 (0:00:00.047)       0:03:31.014 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Sunday 01 March 2026  21:08:43 +0000 (0:00:00.256)       0:03:31.270 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************",
                                "Sunday 01 March 2026  21:08:45 +0000 (0:00:01.998)       0:03:33.268 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************",
                                "Sunday 01 March 2026  21:08:45 +0000 (0:00:00.028)       0:03:33.296 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************",
                                "Sunday 01 March 2026  21:08:45 +0000 (0:00:00.026)       0:03:33.322 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] *******************",
                                "Sunday 01 March 2026  21:08:50 +0000 (0:00:05.279)       0:03:38.602 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Install OSDs] ****************************************",
                                "Sunday 01 March 2026  21:09:01 +0000 (0:00:10.346)       0:03:48.949 **********",
                                "ok: [instance] => (item=/dev/ceph-instance-osd0/data)",
                                "ok: [instance] => (item=/dev/ceph-instance-osd1/data)",
                                "ok: [instance] => (item=/dev/ceph-instance-osd2/data)",
                                "",
                                "TASK [vexxhost.ceph.osd : Get mon dump] ****************************************",
                                "Sunday 01 March 2026  21:10:29 +0000 (0:01:27.985)       0:05:16.935 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Mark require osd release] ****************************",
                                "Sunday 01 March 2026  21:10:30 +0000 (0:00:01.649)       0:05:18.584 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************",
                                "Sunday 01 March 2026  21:10:32 +0000 (0:00:01.666)       0:05:20.251 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance",
                                "",
                                "TASK [vexxhost.ceph.osd : Set the retry count] *********************************",
                                "Sunday 01 March 2026  21:10:32 +0000 (0:00:00.061)       0:05:20.312 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************",
                                "Sunday 01 March 2026  21:10:32 +0000 (0:00:00.044)       0:05:20.357 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : OSD daemon list] *************************************",
                                "Sunday 01 March 2026  21:10:34 +0000 (0:00:01.612)       0:05:21.969 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************",
                                "Sunday 01 March 2026  21:10:34 +0000 (0:00:00.048)       0:05:22.017 **********",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] ************************",
                                "Sunday 01 March 2026  21:10:34 +0000 (0:00:00.055)       0:05:22.073 **********",
                                "skipping: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Ensure RBD kernel module is loaded] **************************************",
                                "Sunday 01 March 2026  21:10:34 +0000 (0:00:00.043)       0:05:22.116 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:10:34 +0000 (0:00:00.367)       0:05:22.484 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************",
                                "Sunday 01 March 2026  21:10:35 +0000 (0:00:00.940)       0:05:23.425 **********",
                                "changed: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})",
                                "changed: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})",
                                "changed: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.rmem_default', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})",
                                "changed: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})",
                                "changed: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})",
                                "changed: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})",
                                "changed: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})",
                                "changed: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***",
                                "Sunday 01 March 2026  21:10:39 +0000 (0:00:03.602)       0:05:27.027 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******",
                                "Sunday 01 March 2026  21:10:39 +0000 (0:00:00.186)       0:05:27.214 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************",
                                "Sunday 01 March 2026  21:10:39 +0000 (0:00:00.436)       0:05:27.650 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Sunday 01 March 2026  21:10:40 +0000 (0:00:00.231)       0:05:27.881 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Configure Kubernetes VIP] ************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:10:40 +0000 (0:00:00.050)       0:05:27.932 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***",
                                "Sunday 01 March 2026  21:10:41 +0000 (0:00:00.918)       0:05:28.850 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************",
                                "Sunday 01 March 2026  21:10:41 +0000 (0:00:00.205)       0:05:29.056 **********",
                                "ok: [instance] => (item=/etc/keepalived/keepalived.conf)",
                                "ok: [instance] => (item=/etc/keepalived/check_apiserver.sh)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)",
                                "ok: [instance] => (item=/etc/haproxy/haproxy.cfg)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/haproxy.yaml)",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****",
                                "Sunday 01 March 2026  21:10:42 +0000 (0:00:00.843)       0:05:29.900 **********",
                                "failed: [instance] (item=/etc/kubernetes/manifests/kube-apiserver.yaml) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/manifests/kube-apiserver.yaml\", \"msg\": \"Path /etc/kubernetes/manifests/kube-apiserver.yaml does not exist !\", \"rc\": 257}",
                                "failed: [instance] (item=/etc/kubernetes/controller-manager.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/controller-manager.conf\", \"msg\": \"Path /etc/kubernetes/controller-manager.conf does not exist !\", \"rc\": 257}",
                                "failed: [instance] (item=/etc/kubernetes/scheduler.conf) => {\"ansible_loop_var\": \"item\", \"changed\": false, \"item\": \"/etc/kubernetes/scheduler.conf\", \"msg\": \"Path /etc/kubernetes/scheduler.conf does not exist !\", \"rc\": 257}",
                                "...ignoring",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********",
                                "Sunday 01 March 2026  21:10:42 +0000 (0:00:00.505)       0:05:30.406 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********",
                                "Sunday 01 March 2026  21:10:42 +0000 (0:00:00.196)       0:05:30.603 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************",
                                "Sunday 01 March 2026  21:10:43 +0000 (0:00:00.185)       0:05:30.788 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***",
                                "Sunday 01 March 2026  21:10:43 +0000 (0:00:00.036)       0:05:30.825 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes manifest] ***************",
                                "Sunday 01 March 2026  21:10:43 +0000 (0:00:00.040)       0:05:30.865 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******",
                                "Sunday 01 March 2026  21:10:43 +0000 (0:00:00.521)       0:05:31.387 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************",
                                "Sunday 01 March 2026  21:10:43 +0000 (0:00:00.191)       0:05:31.578 **********",
                                "",
                                "PLAY [Install Kubernetes] ******************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:10:43 +0000 (0:00:00.056)       0:05:31.634 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:10:44 +0000 (0:00:00.885)       0:05:32.520 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:10:44 +0000 (0:00:00.209)       0:05:32.730 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  21:10:45 +0000 (0:00:00.046)       0:05:32.777 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:10:45 +0000 (0:00:00.208)       0:05:32.985 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:10:45 +0000 (0:00:00.060)       0:05:33.045 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:10:45 +0000 (0:00:00.301)       0:05:33.347 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:10:45 +0000 (0:00:00.055)       0:05:33.403 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:10:45 +0000 (0:00:00.198)       0:05:33.602 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:10:46 +0000 (0:00:01.004)       0:05:34.606 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:10:46 +0000 (0:00:00.069)       0:05:34.675 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:10:47 +0000 (0:00:00.332)       0:05:35.007 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  21:10:49 +0000 (0:00:01.915)       0:05:36.923 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  21:10:49 +0000 (0:00:00.034)       0:05:36.958 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  21:10:49 +0000 (0:00:00.038)       0:05:36.996 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  21:10:49 +0000 (0:00:00.180)       0:05:37.177 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  21:10:50 +0000 (0:00:00.958)       0:05:38.136 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  21:10:50 +0000 (0:00:00.438)       0:05:38.574 **********",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  21:10:51 +0000 (0:00:00.905)       0:05:39.479 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  21:10:52 +0000 (0:00:00.469)       0:05:39.949 **********",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  21:10:52 +0000 (0:00:00.007)       0:05:39.957 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***",
                                "Sunday 01 March 2026  21:10:52 +0000 (0:00:00.345)       0:05:40.303 **********",
                                "fatal: [instance]: FAILED! => {\"changed\": false, \"msg\": \"Failed to import the required Python library (kubernetes) on instance's Python /usr/bin/python3.10. Please read the module documentation and install it in the appropriate location. If the required library is installed, but Ansible is using the wrong Python interpreter, please consult the documentation on ansible_python_interpreter\"}",
                                "...ignoring",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.585)       0:05:40.888 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.033)       0:05:40.922 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.043)       0:05:40.965 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.037)       0:05:41.003 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact if we need to upgrade] ***",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.038)       0:05:41.041 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.042)       0:05:41.083 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.210)       0:05:41.294 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:10:53 +0000 (0:00:00.049)       0:05:41.343 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:10:54 +0000 (0:00:00.678)       0:05:42.021 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:10:54 +0000 (0:00:00.048)       0:05:42.069 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:10:54 +0000 (0:00:00.201)       0:05:42.271 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:10:54 +0000 (0:00:00.050)       0:05:42.321 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:10:55 +0000 (0:00:01.093)       0:05:43.415 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  21:10:55 +0000 (0:00:00.063)       0:05:43.478 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  21:10:55 +0000 (0:00:00.043)       0:05:43.522 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  21:10:55 +0000 (0:00:00.037)       0:05:43.559 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  21:10:55 +0000 (0:00:00.040)       0:05:43.600 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  21:10:56 +0000 (0:00:01.123)       0:05:44.723 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  21:10:57 +0000 (0:00:00.443)       0:05:45.166 **********",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  21:10:58 +0000 (0:00:00.893)       0:05:46.059 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  21:10:58 +0000 (0:00:00.481)       0:05:46.541 **********",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  21:10:58 +0000 (0:00:00.007)       0:05:46.549 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:10:59 +0000 (0:00:00.357)       0:05:46.906 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:10:59 +0000 (0:00:00.210)       0:05:47.116 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:10:59 +0000 (0:00:00.060)       0:05:47.176 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:11:00 +0000 (0:00:00.622)       0:05:47.798 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:11:01 +0000 (0:00:01.340)       0:05:49.139 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:11:01 +0000 (0:00:00.054)       0:05:49.194 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:11:02 +0000 (0:00:00.675)       0:05:49.869 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cri_tools : Create crictl config] ********************",
                                "Sunday 01 March 2026  21:11:03 +0000 (0:00:01.394)       0:05:51.264 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********",
                                "Sunday 01 March 2026  21:11:03 +0000 (0:00:00.450)       0:05:51.714 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:11:04 +0000 (0:00:00.222)       0:05:51.937 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:11:04 +0000 (0:00:00.204)       0:05:52.142 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:11:04 +0000 (0:00:00.060)       0:05:52.202 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:11:05 +0000 (0:00:00.814)       0:05:53.017 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***",
                                "Sunday 01 March 2026  21:11:07 +0000 (0:00:02.674)       0:05:55.691 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Install additional packages] ***********",
                                "Sunday 01 March 2026  21:11:08 +0000 (0:00:00.051)       0:05:55.742 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************",
                                "Sunday 01 March 2026  21:11:08 +0000 (0:00:00.973)       0:05:56.715 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********",
                                "Sunday 01 March 2026  21:11:09 +0000 (0:00:00.194)       0:05:56.910 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******",
                                "Sunday 01 March 2026  21:11:09 +0000 (0:00:00.441)       0:05:57.351 **********",
                                "changed: [instance] => (item=br_netfilter)",
                                "ok: [instance] => (item=ip_tables)",
                                "changed: [instance] => (item=ip6_tables)",
                                "ok: [instance] => (item=nf_conntrack)",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:11:10 +0000 (0:00:00.726)       0:05:58.077 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:11:10 +0000 (0:00:00.199)       0:05:58.277 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:11:10 +0000 (0:00:00.052)       0:05:58.329 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:11:12 +0000 (0:00:01.632)       0:05:59.962 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***",
                                "Sunday 01 March 2026  21:11:12 +0000 (0:00:00.038)       0:06:00.001 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************",
                                "Sunday 01 March 2026  21:11:12 +0000 (0:00:00.063)       0:06:00.064 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************",
                                "Sunday 01 March 2026  21:11:12 +0000 (0:00:00.040)       0:06:00.104 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************",
                                "Sunday 01 March 2026  21:11:16 +0000 (0:00:03.940)       0:06:04.045 **********",
                                "changed: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})",
                                "changed: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})",
                                "changed: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})",
                                "changed: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})",
                                "changed: [instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***",
                                "Sunday 01 March 2026  21:11:17 +0000 (0:00:01.264)       0:06:05.309 **********",
                                "changed: [instance] => (item=/etc/systemd/system/kubelet.service.d)",
                                "ok: [instance] => (item=/etc/kubernetes)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********",
                                "Sunday 01 March 2026  21:11:18 +0000 (0:00:00.533)       0:06:05.842 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***",
                                "Sunday 01 March 2026  21:11:18 +0000 (0:00:00.457)       0:06:06.300 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Check swap status] *************************",
                                "Sunday 01 March 2026  21:11:19 +0000 (0:00:00.460)       0:06:06.761 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************",
                                "Sunday 01 March 2026  21:11:19 +0000 (0:00:00.214)       0:06:06.975 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********",
                                "Sunday 01 March 2026  21:11:19 +0000 (0:00:00.034)       0:06:07.009 **********",
                                "ok: [instance] => (item=swap)",
                                "ok: [instance] => (item=none)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***",
                                "Sunday 01 March 2026  21:11:19 +0000 (0:00:00.557)       0:06:07.566 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************",
                                "Sunday 01 March 2026  21:11:20 +0000 (0:00:00.471)       0:06:08.038 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********",
                                "Sunday 01 March 2026  21:11:20 +0000 (0:00:00.567)       0:06:08.606 **********",
                                "",
                                "RUNNING HANDLER [vexxhost.kubernetes.kubelet : Reload systemd] *****************",
                                "Sunday 01 March 2026  21:11:20 +0000 (0:00:00.008)       0:06:08.614 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********",
                                "Sunday 01 March 2026  21:11:21 +0000 (0:00:00.752)       0:06:09.366 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****",
                                "Sunday 01 March 2026  21:11:22 +0000 (0:00:00.583)       0:06:09.949 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************",
                                "Sunday 01 March 2026  21:11:23 +0000 (0:00:00.961)       0:06:10.911 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********",
                                "Sunday 01 March 2026  21:11:23 +0000 (0:00:00.729)       0:06:11.641 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********",
                                "Sunday 01 March 2026  21:11:24 +0000 (0:00:00.217)       0:06:11.858 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup control plane] ********************",
                                "Sunday 01 March 2026  21:11:24 +0000 (0:00:00.528)       0:06:12.386 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************",
                                "Sunday 01 March 2026  21:11:24 +0000 (0:00:00.100)       0:06:12.487 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***",
                                "Sunday 01 March 2026  21:11:24 +0000 (0:00:00.090)       0:06:12.578 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.211)       0:06:12.789 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.048)       0:06:12.838 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.056)       0:06:12.894 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"instance\"",
                                "}",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.042)       0:06:12.937 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.543)       0:06:13.480 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.043)       0:06:13.523 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca cert] **************",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.037)       0:06:13.561 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.036)       0:06:13.597 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.039)       0:06:13.637 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.039)       0:06:13.677 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************",
                                "Sunday 01 March 2026  21:11:25 +0000 (0:00:00.035)       0:06:13.713 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Initialize cluster] *********************",
                                "Sunday 01 March 2026  21:11:26 +0000 (0:00:00.035)       0:06:13.748 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***",
                                "Sunday 01 March 2026  21:11:46 +0000 (0:00:20.691)       0:06:34.440 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************",
                                "Sunday 01 March 2026  21:11:46 +0000 (0:00:00.233)       0:06:34.673 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***",
                                "Sunday 01 March 2026  21:11:46 +0000 (0:00:00.040)       0:06:34.714 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***",
                                "Sunday 01 March 2026  21:11:47 +0000 (0:00:00.215)       0:06:34.930 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******",
                                "Sunday 01 March 2026  21:11:47 +0000 (0:00:00.214)       0:06:35.145 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************",
                                "Sunday 01 March 2026  21:11:47 +0000 (0:00:00.198)       0:06:35.344 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***",
                                "Sunday 01 March 2026  21:11:51 +0000 (0:00:03.886)       0:06:39.231 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************",
                                "Sunday 01 March 2026  21:11:54 +0000 (0:00:03.140)       0:06:42.371 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***",
                                "Sunday 01 March 2026  21:11:54 +0000 (0:00:00.048)       0:06:42.420 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***",
                                "Sunday 01 March 2026  21:11:54 +0000 (0:00:00.049)       0:06:42.470 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************",
                                "Sunday 01 March 2026  21:11:55 +0000 (0:00:00.727)       0:06:43.197 **********",
                                "skipping: [instance] => (item=DaemonSet)",
                                "skipping: [instance] => (item=ConfigMap)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************",
                                "Sunday 01 March 2026  21:11:55 +0000 (0:00:00.040)       0:06:43.238 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***",
                                "Sunday 01 March 2026  21:11:55 +0000 (0:00:00.044)       0:06:43.282 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of the Kubernetes API services] ***",
                                "Sunday 01 March 2026  21:11:55 +0000 (0:00:00.044)       0:06:43.327 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************",
                                "Sunday 01 March 2026  21:11:55 +0000 (0:00:00.054)       0:06:43.382 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***",
                                "Sunday 01 March 2026  21:11:55 +0000 (0:00:00.044)       0:06:43.426 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***",
                                "Sunday 01 March 2026  21:11:56 +0000 (0:00:00.486)       0:06:43.912 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [Install control-plane components] ****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:11:57 +0000 (0:00:00.953)       0:06:44.866 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  21:11:58 +0000 (0:00:00.988)       0:06:45.854 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:11:58 +0000 (0:00:00.201)       0:06:46.056 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  21:11:59 +0000 (0:00:01.152)       0:06:47.209 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:11:59 +0000 (0:00:00.216)       0:06:47.425 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:11:59 +0000 (0:00:00.046)       0:06:47.472 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:12:00 +0000 (0:00:00.607)       0:06:48.080 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************",
                                "Sunday 01 March 2026  21:12:01 +0000 (0:00:01.504)       0:06:49.584 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************",
                                "Sunday 01 March 2026  21:12:02 +0000 (0:00:00.325)       0:06:49.909 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************",
                                "Sunday 01 March 2026  21:12:02 +0000 (0:00:00.208)       0:06:50.118 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***",
                                "Sunday 01 March 2026  21:12:02 +0000 (0:00:00.284)       0:06:50.403 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***",
                                "Sunday 01 March 2026  21:12:02 +0000 (0:00:00.195)       0:06:50.599 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install plugin] **********************************************************",
                                "Sunday 01 March 2026  21:12:03 +0000 (0:00:00.211)       0:06:50.810 **********",
                                "included: vexxhost.containers.download_artifact for instance",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:12:03 +0000 (0:00:00.053)       0:06:50.863 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:12:03 +0000 (0:00:00.048)       0:06:50.911 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:12:03 +0000 (0:00:00.645)       0:06:51.557 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:12:05 +0000 (0:00:01.777)       0:06:53.334 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:12:05 +0000 (0:00:00.041)       0:06:53.376 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************",
                                "Sunday 01 March 2026  21:12:06 +0000 (0:00:00.647)       0:06:54.024 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  21:12:07 +0000 (0:00:00.857)       0:06:54.882 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:12:09 +0000 (0:00:02.093)       0:06:56.975 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********",
                                "Sunday 01 March 2026  21:12:10 +0000 (0:00:01.046)       0:06:58.022 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Uninstall unattended-upgrades] *******************************************",
                                "Sunday 01 March 2026  21:12:10 +0000 (0:00:00.688)       0:06:58.710 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [controllers] *************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:12:11 +0000 (0:00:00.696)       0:06:59.407 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:12:12 +0000 (0:00:01.075)       0:07:00.482 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:12:12 +0000 (0:00:00.038)       0:07:00.520 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********",
                                "Sunday 01 March 2026  21:12:13 +0000 (0:00:00.463)       0:07:00.983 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:01.284)       0:07:02.268 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.038)       0:07:02.307 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.041)       0:07:02.348 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.029)       0:07:02.377 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.045)       0:07:02.423 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.049)       0:07:02.472 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.036)       0:07:02.509 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.035)       0:07:02.544 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.037)       0:07:02.582 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create Secret] **********************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.035)       0:07:02.618 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.030)       0:07:02.649 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.030)       0:07:02.679 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************",
                                "Sunday 01 March 2026  21:12:14 +0000 (0:00:00.042)       0:07:02.721 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.030)       0:07:02.752 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.028)       0:07:02.780 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.033)       0:07:02.813 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.030)       0:07:02.844 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.032)       0:07:02.876 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create namespace] *************************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.035)       0:07:02.911 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Install Portworx] *************************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.034)       0:07:02.946 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.029)       0:07:02.975 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.032)       0:07:03.008 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.043)       0:07:03.052 **********",
                                "skipping: [instance] => (item={'name': 'controllerplugin'})",
                                "skipping: [instance] => (item={'name': 'nodeplugin'})",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.036)       0:07:03.088 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.031)       0:07:03.120 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.044)       0:07:03.164 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.035)       0:07:03.200 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.031)       0:07:03.231 **********",
                                "skipping: [instance]",
                                "",
                                "PLAY [Deploy Infrastructure] ***************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:12:15 +0000 (0:00:00.068)       0:07:03.300 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Deploy Helm chart] *******************************************************",
                                "Sunday 01 March 2026  21:12:16 +0000 (0:00:00.979)       0:07:04.279 **********",
                                "included: vexxhost.kubernetes.cert_manager for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:12:16 +0000 (0:00:00.050)       0:07:04.330 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:12:16 +0000 (0:00:00.040)       0:07:04.370 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cert_manager : Deploy Helm chart] ********************",
                                "Sunday 01 March 2026  21:12:17 +0000 (0:00:00.467)       0:07:04.838 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Create self-signed cluster issuer] ***",
                                "Sunday 01 March 2026  21:12:46 +0000 (0:00:29.016)       0:07:33.854 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Import tasks for ClusterIssuer type] ***",
                                "Sunday 01 March 2026  21:12:46 +0000 (0:00:00.706)       0:07:34.561 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/atmosphere/roles/cluster_issuer/tasks/type/self-signed/main.yml for instance",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Create ClusterIssuer] ***************",
                                "Sunday 01 March 2026  21:12:46 +0000 (0:00:00.040)       0:07:34.601 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Wait till the secret is created] ****",
                                "Sunday 01 March 2026  21:12:47 +0000 (0:00:00.750)       0:07:35.352 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Copy CA certificate on host] ********",
                                "Sunday 01 March 2026  21:12:48 +0000 (0:00:00.767)       0:07:36.120 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Flush all handlers] *****************",
                                "Sunday 01 March 2026  21:12:48 +0000 (0:00:00.448)       0:07:36.569 **********",
                                "",
                                "RUNNING HANDLER [vexxhost.atmosphere.cluster_issuer : Update CA certificates on host] ***",
                                "Sunday 01 March 2026  21:12:48 +0000 (0:00:00.005)       0:07:36.574 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:12:50 +0000 (0:00:01.586)       0:07:38.161 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:12:50 +0000 (0:00:00.049)       0:07:38.211 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ingress_nginx : Deploy Helm chart] *******************",
                                "Sunday 01 March 2026  21:12:50 +0000 (0:00:00.498)       0:07:38.710 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:13:04 +0000 (0:00:13.172)       0:07:51.882 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:13:04 +0000 (0:00:00.045)       0:07:51.928 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Install all CRDs] ********",
                                "Sunday 01 March 2026  21:13:04 +0000 (0:00:00.480)       0:07:52.408 **********",
                                "ok: [instance] => (item=messaging-topology-operator)",
                                "ok: [instance] => (item=rabbitmq-cluster)",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Deploy Helm chart] *******",
                                "Sunday 01 March 2026  21:13:07 +0000 (0:00:02.845)       0:07:55.253 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:13:10 +0000 (0:00:02.651)       0:07:57.905 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:13:10 +0000 (0:00:00.048)       0:07:57.954 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Install all CRDs] ***",
                                "Sunday 01 March 2026  21:13:10 +0000 (0:00:00.471)       0:07:58.425 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Deploy Helm chart] ***",
                                "Sunday 01 March 2026  21:13:13 +0000 (0:00:03.047)       0:08:01.472 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Check if the Percona XtraDB cluster secret exists] ***",
                                "Sunday 01 March 2026  21:13:15 +0000 (0:00:01.985)       0:08:03.457 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Create a secret] ************",
                                "Sunday 01 March 2026  21:13:16 +0000 (0:00:00.639)       0:08:04.097 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Check if the Percona XtraDB cluster exists] ***",
                                "Sunday 01 March 2026  21:13:17 +0000 (0:00:00.733)       0:08:04.830 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Get current status of the cluster] ***",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.929)       0:08:05.760 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Assert that the cluster is healthy before upgrade] ***",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.046)       0:08:05.806 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Stop PXC-operator] **********",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.050)       0:08:05.857 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Change the cluster Statefulset image to 8.0] ***",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.038)       0:08:05.895 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Wait until the cluster Statefulset rollout] ***",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.044)       0:08:05.940 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Update pxc cluster spec] ****",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.043)       0:08:05.983 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Start PXC-operator] *********",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.044)       0:08:06.028 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Apply Percona XtraDB cluster] ***",
                                "Sunday 01 March 2026  21:13:18 +0000 (0:00:00.041)       0:08:06.069 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Create percona haproxy metric service] ***",
                                "Sunday 01 March 2026  21:14:49 +0000 (0:01:31.583)       0:09:37.652 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:14:50 +0000 (0:00:00.691)       0:09:38.344 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:14:50 +0000 (0:00:00.045)       0:09:38.389 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.valkey : Create TLS resources] ***********************",
                                "Sunday 01 March 2026  21:14:51 +0000 (0:00:00.490)       0:09:38.880 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.valkey : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  21:14:51 +0000 (0:00:00.713)       0:09:39.593 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:14:53 +0000 (0:00:01.491)       0:09:41.084 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:14:53 +0000 (0:00:00.039)       0:09:41.124 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:14:53 +0000 (0:00:00.042)       0:09:41.167 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:14:53 +0000 (0:00:00.033)       0:09:41.200 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:14:53 +0000 (0:00:00.036)       0:09:41.237 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:14:54 +0000 (0:00:00.638)       0:09:41.876 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:14:54 +0000 (0:00:00.050)       0:09:41.926 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:14:54 +0000 (0:00:00.042)       0:09:41.968 **********",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:14:54 +0000 (0:00:00.054)       0:09:42.023 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:14:54 +0000 (0:00:00.052)       0:09:42.076 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:14:54 +0000 (0:00:00.044)       0:09:42.121 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Get the Kuberentes service for Percona XtraDB Cluster] ***",
                                "Sunday 01 March 2026  21:14:54 +0000 (0:00:00.487)       0:09:42.608 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Install MySQL python package] *************",
                                "Sunday 01 March 2026  21:14:55 +0000 (0:00:00.624)       0:09:43.233 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Check MySQL ready] ************************",
                                "Sunday 01 March 2026  21:14:56 +0000 (0:00:01.041)       0:09:44.275 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Create Keycloak database] *****************",
                                "Sunday 01 March 2026  21:14:56 +0000 (0:00:00.421)       0:09:44.697 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Create a Keycloak user] *******************",
                                "Sunday 01 March 2026  21:14:57 +0000 (0:00:00.405)       0:09:45.102 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Disable pxc strict mode] ******************",
                                "Sunday 01 March 2026  21:14:57 +0000 (0:00:00.454)       0:09:45.557 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  21:14:58 +0000 (0:00:00.398)       0:09:45.955 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Wait until keycloak ready] ****************",
                                "Sunday 01 March 2026  21:17:31 +0000 (0:02:33.358)       0:12:19.313 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Keycloak Ingress] *************************************************",
                                "Sunday 01 March 2026  21:17:32 +0000 (0:00:00.658)       0:12:19.972 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress keycloak] *******************",
                                "Sunday 01 March 2026  21:17:32 +0000 (0:00:00.042)       0:12:20.015 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Enable pxc strict mode] *******************",
                                "Sunday 01 March 2026  21:17:33 +0000 (0:00:00.907)       0:12:20.922 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keepalived : Deploy service] *************************",
                                "Sunday 01 March 2026  21:17:33 +0000 (0:00:00.260)       0:12:21.183 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [Deploy Monitoring] *******************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:17:34 +0000 (0:00:00.813)       0:12:21.996 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:17:35 +0000 (0:00:01.561)       0:12:23.558 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:17:35 +0000 (0:00:00.059)       0:12:23.617 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.node_feature_discovery : Install all CRDs] ***********",
                                "Sunday 01 March 2026  21:17:36 +0000 (0:00:00.510)       0:12:24.128 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.node_feature_discovery : Deploy Helm chart] **********",
                                "Sunday 01 March 2026  21:17:37 +0000 (0:00:00.764)       0:12:24.893 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:17:39 +0000 (0:00:01.997)       0:12:26.891 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:17:39 +0000 (0:00:00.174)       0:12:27.065 **********",
                                "changed: [instance]",
                                "",
                                "TASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****",
                                "Sunday 01 March 2026  21:17:39 +0000 (0:00:00.589)       0:12:27.654 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Wait until Keycloak service is ready] ***",
                                "Sunday 01 March 2026  21:17:41 +0000 (0:00:01.166)       0:12:28.821 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak realm] *******",
                                "Sunday 01 March 2026  21:17:41 +0000 (0:00:00.718)       0:12:29.540 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Add client roles in \"id_token\"] ***",
                                "Sunday 01 March 2026  21:17:49 +0000 (0:00:08.070)       0:12:37.610 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" CA certificate] ***",
                                "Sunday 01 March 2026  21:17:51 +0000 (0:00:01.700)       0:12:39.311 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client certificate] ***",
                                "Sunday 01 March 2026  21:17:51 +0000 (0:00:00.297)       0:12:39.609 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client key] ***",
                                "Sunday 01 March 2026  21:17:52 +0000 (0:00:00.196)       0:12:39.805 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create secrets for monitoring] ***",
                                "Sunday 01 March 2026  21:17:52 +0000 (0:00:00.202)       0:12:40.008 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Generate client secret passwords] ***",
                                "Sunday 01 March 2026  21:17:53 +0000 (0:00:00.746)       0:12:40.755 **********",
                                "changed: [instance] => (item=alertmanager)",
                                "changed: [instance] => (item=grafana)",
                                "changed: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Collect all client secrets] ***",
                                "Sunday 01 March 2026  21:18:10 +0000 (0:00:17.058)       0:12:57.814 **********",
                                "ok: [instance] => (item=alertmanager)",
                                "ok: [instance] => (item=grafana)",
                                "ok: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak clients] *****",
                                "Sunday 01 March 2026  21:18:12 +0000 (0:00:01.931)       0:12:59.745 **********",
                                "changed: [instance] => (item=None)",
                                "changed: [instance] => (item=None)",
                                "changed: [instance] => (item=None)",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak roles] *******",
                                "Sunday 01 March 2026  21:18:15 +0000 (0:00:03.975)       0:13:03.721 **********",
                                "changed: [instance] => (item=None)",
                                "changed: [instance] => (item=None)",
                                "changed: [instance] => (item=None)",
                                "changed: [instance] => (item=None)",
                                "changed: [instance] => (item=None)",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Generate cookie secrets] *****",
                                "Sunday 01 March 2026  21:18:21 +0000 (0:00:05.104)       0:13:08.826 **********",
                                "changed: [instance] => (item=alertmanager)",
                                "changed: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Generate OAuth2 proxy configuration] ***",
                                "Sunday 01 March 2026  21:18:32 +0000 (0:00:11.337)       0:13:20.163 **********",
                                "changed: [instance] => (item=alertmanager)",
                                "changed: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create certificate issuer] ***",
                                "Sunday 01 March 2026  21:18:43 +0000 (0:00:11.392)       0:13:31.556 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Install all CRDs] ************",
                                "Sunday 01 March 2026  21:18:44 +0000 (0:00:00.700)       0:13:32.257 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy additional dashboards] ***",
                                "Sunday 01 March 2026  21:18:52 +0000 (0:00:07.857)       0:13:40.115 **********",
                                "changed: [instance] => (item={'name': 'haproxy', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'goldpinger', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'node-exporter-full', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'ceph-cluster', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'ceph-cluster-advanced', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'hosts-overview', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'host-details', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'pool-overview', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'pool-detail', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'osds-overview', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'osd-device-details', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'rbd-overview', 'state': 'present'})",
                                "changed: [instance] => (item={'name': 'rbd-details', 'state': 'present'})",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy Helm chart] ***********",
                                "Sunday 01 March 2026  21:19:06 +0000 (0:00:14.378)       0:13:54.493 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:19:37 +0000 (0:00:30.991)       0:14:25.485 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:19:37 +0000 (0:00:00.079)       0:14:25.564 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.loki : Deploy Helm chart] ****************************",
                                "Sunday 01 March 2026  21:19:38 +0000 (0:00:00.621)       0:14:26.186 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:19:42 +0000 (0:00:03.918)       0:14:30.104 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:19:42 +0000 (0:00:00.064)       0:14:30.169 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.vector : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  21:19:43 +0000 (0:00:00.628)       0:14:30.798 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:19:45 +0000 (0:00:02.592)       0:14:33.390 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:19:45 +0000 (0:00:00.051)       0:14:33.442 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.goldpinger : Deploy Helm chart] **********************",
                                "Sunday 01 March 2026  21:19:46 +0000 (0:00:00.502)       0:14:33.945 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ipmi_exporter : Deploy service] **********************",
                                "Sunday 01 March 2026  21:19:48 +0000 (0:00:02.173)       0:14:36.118 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:19:49 +0000 (0:00:00.803)       0:14:36.921 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:19:49 +0000 (0:00:00.058)       0:14:36.980 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.prometheus_pushgateway : Deploy Helm chart] **********",
                                "Sunday 01 March 2026  21:19:49 +0000 (0:00:00.584)       0:14:37.565 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [Deploy OpenStack] ********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:19:51 +0000 (0:00:01.632)       0:14:39.198 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:02.108)       0:14:41.307 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:00.074)       0:14:41.381 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:00.050)       0:14:41.431 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:00.053)       0:14:41.485 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:00.055)       0:14:41.540 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:00.054)       0:14:41.594 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:00.064)       0:14:41.658 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:19:53 +0000 (0:00:00.062)       0:14:41.721 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:19:54 +0000 (0:00:00.074)       0:14:41.795 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:19:54 +0000 (0:00:00.072)       0:14:41.868 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:19:54 +0000 (0:00:00.075)       0:14:41.944 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.memcached : Deploy Helm chart] ***********************",
                                "Sunday 01 March 2026  21:19:54 +0000 (0:00:00.540)       0:14:42.484 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.memcached : Apply manifests for monitoring] **********",
                                "Sunday 01 March 2026  21:19:55 +0000 (0:00:01.237)       0:14:43.722 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************",
                                "Sunday 01 March 2026  21:19:56 +0000 (0:00:00.999)       0:14:44.721 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****",
                                "Sunday 01 March 2026  21:20:02 +0000 (0:00:05.350)       0:14:50.072 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********",
                                "Sunday 01 March 2026  21:20:02 +0000 (0:00:00.231)       0:14:50.304 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:20:03 +0000 (0:00:00.541)       0:14:50.846 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:20:03 +0000 (0:00:00.089)       0:14:50.935 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:20:03 +0000 (0:00:00.093)       0:14:51.029 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:20:04 +0000 (0:00:00.805)       0:14:51.835 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:20:04 +0000 (0:00:00.072)       0:14:51.908 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:20:04 +0000 (0:00:00.058)       0:14:51.966 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:20:04 +0000 (0:00:00.053)       0:14:52.019 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:21:05 +0000 (0:01:00.977)       0:15:52.997 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:21:05 +0000 (0:00:00.666)       0:15:53.664 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:21:05 +0000 (0:00:00.065)       0:15:53.729 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:21:06 +0000 (0:00:00.060)       0:15:53.789 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:21:06 +0000 (0:00:00.055)       0:15:53.845 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:21:06 +0000 (0:00:00.052)       0:15:53.898 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:21:06 +0000 (0:00:00.092)       0:15:53.990 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:21:06 +0000 (0:00:00.083)       0:15:54.074 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:21:06 +0000 (0:00:00.073)       0:15:54.147 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keycloak realms] *******************",
                                "Sunday 01 March 2026  21:21:06 +0000 (0:00:00.538)       0:15:54.685 **********",
                                "ok: [instance] => (item=None)",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Setup Keycloak Authentication Required Actions (MFA)] ***",
                                "Sunday 01 March 2026  21:21:08 +0000 (0:00:01.449)       0:15:56.135 **********",
                                "ok: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create ConfigMap with all OpenID connect configurations] ***",
                                "Sunday 01 March 2026  21:21:09 +0000 (0:00:01.276)       0:15:57.412 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keycloak clients] ******************",
                                "Sunday 01 March 2026  21:21:10 +0000 (0:00:00.917)       0:15:58.330 **********",
                                "changed: [instance] => (item=None)",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Assign realm-management roles to service account] ***",
                                "Sunday 01 March 2026  21:21:11 +0000 (0:00:01.199)       0:15:59.530 **********",
                                "changed: [instance] => (item=None)",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  21:21:13 +0000 (0:00:01.713)       0:16:01.243 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:23:52 +0000 (0:02:39.018)       0:18:40.262 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:23:52 +0000 (0:00:00.110)       0:18:40.372 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:23:52 +0000 (0:00:00.065)       0:18:40.438 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:23:52 +0000 (0:00:00.062)       0:18:40.500 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress identity] *************************************************",
                                "Sunday 01 March 2026  21:23:52 +0000 (0:00:00.071)       0:18:40.572 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress identity] *******************",
                                "Sunday 01 March 2026  21:23:52 +0000 (0:00:00.080)       0:18:40.652 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Validate if ingress is reachable] *********",
                                "Sunday 01 March 2026  21:23:53 +0000 (0:00:01.011)       0:18:41.664 **********",
                                "FAILED - RETRYING: [instance]: Validate if ingress is reachable (120 retries left).",
                                "FAILED - RETRYING: [instance]: Validate if ingress is reachable (119 retries left).",
                                "FAILED - RETRYING: [instance]: Validate if ingress is reachable (118 retries left).",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Wait until identity service ready] ********",
                                "Sunday 01 March 2026  21:23:58 +0000 (0:00:04.407)       0:18:46.071 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone domains] ******************",
                                "Sunday 01 March 2026  21:23:59 +0000 (0:00:00.716)       0:18:46.788 **********",
                                "ok: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone identity providers] *******",
                                "Sunday 01 March 2026  21:24:00 +0000 (0:00:01.261)       0:18:48.049 **********",
                                "changed: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone federation mappings] ******",
                                "Sunday 01 March 2026  21:24:01 +0000 (0:00:01.185)       0:18:49.235 **********",
                                "changed: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone federation protocols] *****",
                                "Sunday 01 March 2026  21:24:02 +0000 (0:00:01.199)       0:18:50.435 **********",
                                "changed: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********",
                                "Sunday 01 March 2026  21:24:03 +0000 (0:00:01.259)       0:18:51.695 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:24:04 +0000 (0:00:00.238)       0:18:51.934 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  21:24:05 +0000 (0:00:01.188)       0:18:53.122 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:24:05 +0000 (0:00:00.270)       0:18:53.393 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:24:05 +0000 (0:00:00.086)       0:18:53.480 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:24:06 +0000 (0:00:00.518)       0:18:53.998 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************",
                                "Sunday 01 March 2026  21:24:07 +0000 (0:00:01.280)       0:18:55.278 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***",
                                "Sunday 01 March 2026  21:24:08 +0000 (0:00:00.538)       0:18:55.817 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***",
                                "Sunday 01 March 2026  21:24:08 +0000 (0:00:00.773)       0:18:56.590 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***",
                                "Sunday 01 March 2026  21:24:09 +0000 (0:00:00.777)       0:18:57.368 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Generate OpenStack-Helm endpoints] ***************************************",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.373)       0:18:57.741 **********",
                                "included: openstack_helm_endpoints for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.124)       0:18:57.866 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.062)       0:18:57.928 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.052)       0:18:57.981 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.049)       0:18:58.031 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.050)       0:18:58.081 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.053)       0:18:58.134 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.058)       0:18:58.192 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.063)       0:18:58.256 **********",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.070)       0:18:58.327 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************",
                                "Sunday 01 March 2026  21:24:10 +0000 (0:00:00.072)       0:18:58.400 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********",
                                "Sunday 01 March 2026  21:24:11 +0000 (0:00:00.561)       0:18:58.962 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:24:12 +0000 (0:00:00.795)       0:18:59.757 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:24:12 +0000 (0:00:00.089)       0:18:59.846 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:24:12 +0000 (0:00:00.092)       0:18:59.939 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:24:12 +0000 (0:00:00.737)       0:19:00.676 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:24:13 +0000 (0:00:00.059)       0:19:00.736 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:24:13 +0000 (0:00:00.054)       0:19:00.790 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:24:13 +0000 (0:00:00.052)       0:19:00.842 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:24:44 +0000 (0:00:30.921)       0:19:31.764 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:24:44 +0000 (0:00:00.702)       0:19:32.466 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:24:44 +0000 (0:00:00.066)       0:19:32.533 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:24:44 +0000 (0:00:00.060)       0:19:32.593 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:24:44 +0000 (0:00:00.060)       0:19:32.653 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:24:44 +0000 (0:00:00.059)       0:19:32.713 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=key_manager)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:24:45 +0000 (0:00:00.122)       0:19:32.835 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:24:45 +0000 (0:00:00.074)       0:19:32.909 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:24:45 +0000 (0:00:00.067)       0:19:32.977 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.barbican : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  21:24:45 +0000 (0:00:00.562)       0:19:33.539 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:26:16 +0000 (0:01:31.044)       0:21:04.584 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:26:16 +0000 (0:00:00.118)       0:21:04.703 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:26:17 +0000 (0:00:00.066)       0:21:04.769 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:26:17 +0000 (0:00:00.061)       0:21:04.831 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress key-manager] **********************************************",
                                "Sunday 01 March 2026  21:26:17 +0000 (0:00:00.079)       0:21:04.911 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress key-manager] ****************",
                                "Sunday 01 March 2026  21:26:17 +0000 (0:00:00.082)       0:21:04.993 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.barbican : Create creator role] **********************",
                                "Sunday 01 March 2026  21:26:18 +0000 (0:00:01.054)       0:21:06.047 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.barbican : Add implied roles] ************************",
                                "Sunday 01 March 2026  21:26:19 +0000 (0:00:01.217)       0:21:07.265 **********",
                                "changed: [instance] => (item={'role': 'member', 'implies': 'creator'})",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:26:26 +0000 (0:00:07.254)       0:21:14.520 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:26:26 +0000 (0:00:00.074)       0:21:14.594 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph : Deploy Helm chart] ***********************",
                                "Sunday 01 March 2026  21:26:27 +0000 (0:00:00.534)       0:21:15.129 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:02.706)       0:21:17.835 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.068)       0:21:17.904 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.052)       0:21:17.956 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.059)       0:21:18.015 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.053)       0:21:18.069 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.058)       0:21:18.128 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.051)       0:21:18.180 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.069)       0:21:18.250 **********",
                                "ok: [instance] => (item=rook_ceph_cluster)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.070)       0:21:18.321 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.072)       0:21:18.394 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:26:30 +0000 (0:00:00.073)       0:21:18.468 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Set mgr/cephadm/warn_on_stray_daemons to false] ***",
                                "Sunday 01 March 2026  21:26:31 +0000 (0:00:00.517)       0:21:18.985 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Collect \"ceph quorum_status\" output from a monitor] ***",
                                "Sunday 01 March 2026  21:26:58 +0000 (0:00:26.849)       0:21:45.835 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for client.admin] ***",
                                "Sunday 01 March 2026  21:27:12 +0000 (0:00:14.652)       0:22:00.488 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for monitors] ***",
                                "Sunday 01 March 2026  21:27:14 +0000 (0:00:01.560)       0:22:02.048 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create Ceph cluster resource] ****",
                                "Sunday 01 March 2026  21:27:15 +0000 (0:00:01.259)       0:22:03.308 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Deploy Helm chart] ***************",
                                "Sunday 01 March 2026  21:27:16 +0000 (0:00:00.783)       0:22:04.091 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack user] ***********",
                                "Sunday 01 March 2026  21:27:19 +0000 (0:00:02.749)       0:22:06.841 **********",
                                "[WARNING]: Module did not set no_log for update_password",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Grant access to \"service\" project] ***",
                                "Sunday 01 March 2026  21:27:21 +0000 (0:00:01.911)       0:22:08.752 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack service] ********",
                                "Sunday 01 March 2026  21:27:27 +0000 (0:00:06.930)       0:22:15.683 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack endpoints] ******",
                                "Sunday 01 March 2026  21:27:29 +0000 (0:00:01.151)       0:22:16.834 **********",
                                "changed: [instance] => (item={'interface': 'public', 'url': 'https://object-store.199-204-45-156.nip.io/swift/v1/%(tenant_id)s'})",
                                "changed: [instance] => (item={'interface': 'internal', 'url': 'http://rook-ceph-rgw-ceph.openstack.svc.cluster.local/swift/v1/%(tenant_id)s'})",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:27:31 +0000 (0:00:02.083)       0:22:18.918 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:27:31 +0000 (0:00:00.114)       0:22:19.033 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:27:31 +0000 (0:00:00.052)       0:22:19.085 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:27:31 +0000 (0:00:00.056)       0:22:19.141 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress rook-ceph-cluster] ****************************************",
                                "Sunday 01 March 2026  21:27:31 +0000 (0:00:00.076)       0:22:19.218 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress rook-ceph-cluster] **********",
                                "Sunday 01 March 2026  21:27:31 +0000 (0:00:00.089)       0:22:19.307 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:27:32 +0000 (0:00:01.004)       0:22:20.312 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:27:32 +0000 (0:00:00.067)       0:22:20.379 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Collect \"ceph mon dump\" output from a monitor] ***",
                                "Sunday 01 March 2026  21:27:33 +0000 (0:00:00.561)       0:22:20.941 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Generate fact with list of Ceph monitors] ***",
                                "Sunday 01 March 2026  21:27:47 +0000 (0:00:14.654)       0:22:35.596 **********",
                                "ok: [instance] => (item=10.96.240.200)",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph service] *************",
                                "Sunday 01 March 2026  21:27:47 +0000 (0:00:00.079)       0:22:35.675 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph endpoints] ***********",
                                "Sunday 01 March 2026  21:27:48 +0000 (0:00:00.710)       0:22:36.385 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Retrieve client.admin keyring] ***",
                                "Sunday 01 March 2026  21:27:49 +0000 (0:00:00.768)       0:22:37.154 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Parse client.admin keyring] ******",
                                "Sunday 01 March 2026  21:27:50 +0000 (0:00:01.324)       0:22:38.478 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Create \"pvc-ceph-client-key\" secret] ***",
                                "Sunday 01 March 2026  21:27:50 +0000 (0:00:00.068)       0:22:38.547 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Deploy Helm chart] ***************",
                                "Sunday 01 March 2026  21:27:51 +0000 (0:00:00.711)       0:22:39.259 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:27:52 +0000 (0:00:01.283)       0:22:40.543 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:27:53 +0000 (0:00:00.218)       0:22:40.761 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:27:53 +0000 (0:00:00.086)       0:22:40.848 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:27:53 +0000 (0:00:00.689)       0:22:41.537 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:27:53 +0000 (0:00:00.169)       0:22:41.707 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:27:54 +0000 (0:00:00.054)       0:22:41.762 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:27:54 +0000 (0:00:00.060)       0:22:41.822 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:28:24 +0000 (0:00:30.866)       0:23:12.689 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:28:25 +0000 (0:00:00.716)       0:23:13.405 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:28:25 +0000 (0:00:00.073)       0:23:13.478 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:28:25 +0000 (0:00:00.061)       0:23:13.540 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:28:25 +0000 (0:00:00.066)       0:23:13.606 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:28:25 +0000 (0:00:00.063)       0:23:13.670 **********",
                                "ok: [instance] => (item=image)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=dashboard)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:28:26 +0000 (0:00:00.131)       0:23:13.802 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:28:26 +0000 (0:00:00.083)       0:23:13.885 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:28:26 +0000 (0:00:00.085)       0:23:13.970 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  21:28:26 +0000 (0:00:00.544)       0:23:14.515 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:30:33 +0000 (0:02:06.719)       0:25:21.234 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:30:33 +0000 (0:00:00.117)       0:25:21.352 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:30:33 +0000 (0:00:00.050)       0:25:21.403 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:30:33 +0000 (0:00:00.059)       0:25:21.462 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress image] ****************************************************",
                                "Sunday 01 March 2026  21:30:33 +0000 (0:00:00.075)       0:25:21.538 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress image] **********************",
                                "Sunday 01 March 2026  21:30:33 +0000 (0:00:00.077)       0:25:21.616 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create images] ***********************************************************",
                                "Sunday 01 March 2026  21:30:34 +0000 (0:00:01.031)       0:25:22.647 **********",
                                "included: glance_image for instance => (item={'container_format': 'bare', 'disk_format': 'raw', 'is_public': True, 'min_disk': 1, 'name': 'cirros', 'url': 'http://download.cirros-cloud.net/0.6.2/cirros-0.6.2-x86_64-disk.img'})",
                                "",
                                "TASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************",
                                "Sunday 01 March 2026  21:30:35 +0000 (0:00:00.132)       0:25:22.780 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  21:30:38 +0000 (0:00:03.180)       0:25:25.960 **********",
                                "FAILED - RETRYING: [instance]: Check if image exists (120 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (119 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (118 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (117 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (116 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (115 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (114 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (113 retries left).",
                                "FAILED - RETRYING: [instance]: Check if image exists (112 retries left).",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  21:30:56 +0000 (0:00:18.553)       0:25:44.514 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  21:30:57 +0000 (0:00:00.224)       0:25:44.739 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  21:30:57 +0000 (0:00:00.782)       0:25:45.522 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  21:30:58 +0000 (0:00:00.239)       0:25:45.761 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  21:30:58 +0000 (0:00:00.316)       0:25:46.078 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  21:30:59 +0000 (0:00:00.725)       0:25:46.804 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  21:31:04 +0000 (0:00:05.543)       0:25:52.347 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:31:04 +0000 (0:00:00.324)       0:25:52.671 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.078)       0:25:52.749 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.053)       0:25:52.803 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.056)       0:25:52.859 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.057)       0:25:52.917 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.063)       0:25:52.980 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.059)       0:25:53.039 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.060)       0:25:53.100 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=staffeln)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.102)       0:25:53.203 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.083)       0:25:53.286 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:31:05 +0000 (0:00:00.078)       0:25:53.365 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.staffeln : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  21:31:06 +0000 (0:00:00.557)       0:25:53.922 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:31:27 +0000 (0:00:20.890)       0:26:14.813 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:31:27 +0000 (0:00:00.099)       0:26:14.912 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:31:27 +0000 (0:00:00.089)       0:26:15.002 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:31:27 +0000 (0:00:00.722)       0:26:15.724 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:31:28 +0000 (0:00:00.056)       0:26:15.781 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:31:28 +0000 (0:00:00.061)       0:26:15.842 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:31:28 +0000 (0:00:00.057)       0:26:15.899 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:31:59 +0000 (0:00:30.872)       0:26:46.771 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:31:59 +0000 (0:00:00.668)       0:26:47.440 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:31:59 +0000 (0:00:00.065)       0:26:47.505 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:31:59 +0000 (0:00:00.061)       0:26:47.567 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:31:59 +0000 (0:00:00.063)       0:26:47.630 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:31:59 +0000 (0:00:00.063)       0:26:47.694 **********",
                                "ok: [instance] => (item=volumev3)",
                                "ok: [instance] => (item=image)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:32:00 +0000 (0:00:00.138)       0:26:47.832 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:32:00 +0000 (0:00:00.071)       0:26:47.904 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:32:00 +0000 (0:00:00.064)       0:26:47.968 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cinder : Generate Helm values] ***********************",
                                "Sunday 01 March 2026  21:32:00 +0000 (0:00:00.554)       0:26:48.523 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cinder : Append Helm values (Staffeln)] **************",
                                "Sunday 01 March 2026  21:32:01 +0000 (0:00:00.358)       0:26:48.881 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cinder : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  21:32:01 +0000 (0:00:00.059)       0:26:48.940 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:36:43 +0000 (0:04:42.760)       0:31:31.701 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:36:44 +0000 (0:00:00.104)       0:31:31.806 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:36:44 +0000 (0:00:00.052)       0:31:31.858 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:36:44 +0000 (0:00:00.051)       0:31:31.909 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress volumev3] *************************************************",
                                "Sunday 01 March 2026  21:36:44 +0000 (0:00:00.193)       0:31:32.102 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress volumev3] *******************",
                                "Sunday 01 March 2026  21:36:44 +0000 (0:00:00.073)       0:31:32.175 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:01.001)       0:31:33.177 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.085)       0:31:33.263 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.052)       0:31:33.315 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.050)       0:31:33.365 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.042)       0:31:33.408 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.053)       0:31:33.461 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.051)       0:31:33.513 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.053)       0:31:33.567 **********",
                                "ok: [instance] => (item=placement)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:36:45 +0000 (0:00:00.097)       0:31:33.664 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:36:46 +0000 (0:00:00.074)       0:31:33.739 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:36:46 +0000 (0:00:00.076)       0:31:33.815 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.placement : Deploy Helm chart] ***********************",
                                "Sunday 01 March 2026  21:36:46 +0000 (0:00:00.533)       0:31:34.348 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:38:05 +0000 (0:01:18.409)       0:32:52.757 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:38:05 +0000 (0:00:00.101)       0:32:52.859 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:38:05 +0000 (0:00:00.060)       0:32:52.919 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:38:05 +0000 (0:00:00.056)       0:32:52.976 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress placement] ************************************************",
                                "Sunday 01 March 2026  21:38:05 +0000 (0:00:00.067)       0:32:53.044 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress placement] ******************",
                                "Sunday 01 March 2026  21:38:05 +0000 (0:00:00.075)       0:32:53.119 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [Configure operating system] **********************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:38:06 +0000 (0:00:01.000)       0:32:54.120 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Detect if the \"lpfc\" module is loaded] ********",
                                "Sunday 01 March 2026  21:38:08 +0000 (0:00:02.613)       0:32:56.734 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Install the configuration file] ***************",
                                "Sunday 01 March 2026  21:38:09 +0000 (0:00:00.255)       0:32:56.989 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Get the values for the module parameters] *****",
                                "Sunday 01 March 2026  21:38:09 +0000 (0:00:00.056)       0:32:57.046 **********",
                                "skipping: [instance] => (item=lpfc_lun_queue_depth)",
                                "skipping: [instance] => (item=lpfc_sg_seg_cnt)",
                                "skipping: [instance] => (item=lpfc_max_luns)",
                                "skipping: [instance] => (item=lpfc_enable_fc4_type)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Detect if the run-time module parameters are set correctly] ***",
                                "Sunday 01 March 2026  21:38:09 +0000 (0:00:00.067)       0:32:57.113 **********",
                                "skipping: [instance] => (item=lpfc_lun_queue_depth)",
                                "skipping: [instance] => (item=lpfc_sg_seg_cnt)",
                                "skipping: [instance] => (item=lpfc_max_luns)",
                                "skipping: [instance] => (item=lpfc_enable_fc4_type)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Update \"initramfs\" if the configuration file has changed] ***",
                                "Sunday 01 March 2026  21:38:09 +0000 (0:00:00.063)       0:32:57.177 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Reboot the system if the configuration file has changed] *****************",
                                "Sunday 01 March 2026  21:38:09 +0000 (0:00:00.055)       0:32:57.233 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************",
                                "Sunday 01 March 2026  21:38:09 +0000 (0:00:00.077)       0:32:57.310 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********",
                                "Sunday 01 March 2026  21:38:13 +0000 (0:00:04.191)       0:33:01.502 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********",
                                "Sunday 01 March 2026  21:38:36 +0000 (0:00:22.617)       0:33:24.119 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.iscsi : Ensure iscsid is started] ********************",
                                "Sunday 01 March 2026  21:38:36 +0000 (0:00:00.555)       0:33:24.674 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.udev : Add udev rules for Pure Storage FlashArray] ***",
                                "Sunday 01 March 2026  21:38:37 +0000 (0:00:00.449)       0:33:25.124 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.udev : Add udev rules for SCSI Unit Attention] *******",
                                "Sunday 01 March 2026  21:38:37 +0000 (0:00:00.485)       0:33:25.609 **********",
                                "changed: [instance]",
                                "",
                                "RUNNING HANDLER [vexxhost.atmosphere.multipathd : Restart \"multipathd\"] ********",
                                "Sunday 01 March 2026  21:38:38 +0000 (0:00:00.533)       0:33:26.142 **********",
                                "changed: [instance]",
                                "",
                                "RUNNING HANDLER [vexxhost.atmosphere.udev : Reload udev] ***********************",
                                "Sunday 01 March 2026  21:38:38 +0000 (0:00:00.486)       0:33:26.629 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy SDN] **************************************************************",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.289)       0:33:26.919 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.087)       0:33:27.006 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.054)       0:33:27.061 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.050)       0:33:27.112 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.052)       0:33:27.165 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.213)       0:33:27.378 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.063)       0:33:27.442 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.058)       0:33:27.500 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.052)       0:33:27.553 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.086)       0:33:27.639 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:38:39 +0000 (0:00:00.077)       0:33:27.716 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Get the current status of all systemd values for containerd] ***",
                                "Sunday 01 March 2026  21:38:40 +0000 (0:00:00.542)       0:33:28.259 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Assert that LimitMEMLOCK is set to infinity] ***",
                                "Sunday 01 March 2026  21:38:40 +0000 (0:00:00.231)       0:33:28.490 **********",
                                "ok: [instance] => {",
                                "    \"changed\": false,",
                                "    \"msg\": \"All assertions passed\"",
                                "}",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Pull openvswitch image] ****************",
                                "Sunday 01 March 2026  21:38:40 +0000 (0:00:00.062)       0:33:28.552 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Verify openvswitch image pull] *********",
                                "Sunday 01 March 2026  21:38:47 +0000 (0:00:06.685)       0:33:35.237 **********",
                                "ok: [instance] => {",
                                "    \"changed\": false,",
                                "    \"msg\": \"Successfully pulled openvswitch image\"",
                                "}",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Deploy Helm chart] *********************",
                                "Sunday 01 March 2026  21:38:47 +0000 (0:00:00.056)       0:33:35.294 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:38:48 +0000 (0:00:01.414)       0:33:36.709 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:38:49 +0000 (0:00:00.067)       0:33:36.776 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Check if ovn_controller DaemonSet exists] ******",
                                "Sunday 01 March 2026  21:38:49 +0000 (0:00:00.542)       0:33:37.319 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Delete existing ovn controller DaemonSet if type label is found] ***",
                                "Sunday 01 March 2026  21:38:50 +0000 (0:00:01.222)       0:33:38.541 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Pull ovn-controller image] *********************",
                                "Sunday 01 March 2026  21:38:50 +0000 (0:00:00.067)       0:33:38.608 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Verify ovn-controller image pull] **************",
                                "Sunday 01 March 2026  21:38:56 +0000 (0:00:05.854)       0:33:44.463 **********",
                                "ok: [instance] => {",
                                "    \"changed\": false,",
                                "    \"msg\": \"Successfully pulled ovn-controller image\"",
                                "}",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Deploy Helm chart] *****************************",
                                "Sunday 01 March 2026  21:38:56 +0000 (0:00:00.059)       0:33:44.523 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [controllers[0]] **********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  21:38:58 +0000 (0:00:01.914)       0:33:46.438 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:39:01 +0000 (0:00:02.831)       0:33:49.270 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:39:01 +0000 (0:00:00.088)       0:33:49.358 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:39:01 +0000 (0:00:00.060)       0:33:49.419 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:39:01 +0000 (0:00:00.077)       0:33:49.496 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:39:01 +0000 (0:00:00.075)       0:33:49.571 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:39:01 +0000 (0:00:00.065)       0:33:49.637 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:39:01 +0000 (0:00:00.064)       0:33:49.701 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:39:02 +0000 (0:00:00.074)       0:33:49.775 **********",
                                "ok: [instance] => (item=libvirt_exporter)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:39:02 +0000 (0:00:00.073)       0:33:49.849 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:39:02 +0000 (0:00:00.086)       0:33:49.936 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:39:02 +0000 (0:00:00.090)       0:33:50.026 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.libvirt : Create CA certificates] ********************",
                                "Sunday 01 March 2026  21:39:02 +0000 (0:00:00.571)       0:33:50.598 **********",
                                "changed: [instance] => (item=libvirt-vnc)",
                                "changed: [instance] => (item=libvirt-api)",
                                "",
                                "TASK [vexxhost.atmosphere.libvirt : Create Issuers] ****************************",
                                "Sunday 01 March 2026  21:39:04 +0000 (0:00:01.555)       0:33:52.154 **********",
                                "changed: [instance] => (item=libvirt-vnc)",
                                "changed: [instance] => (item=libvirt-api)",
                                "",
                                "TASK [vexxhost.atmosphere.libvirt : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  21:39:05 +0000 (0:00:01.395)       0:33:53.549 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:39:07 +0000 (0:00:01.861)       0:33:55.411 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:39:07 +0000 (0:00:00.086)       0:33:55.497 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.coredns : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  21:39:08 +0000 (0:00:00.533)       0:33:56.031 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************",
                                "Sunday 01 March 2026  21:39:08 +0000 (0:00:00.097)       0:33:56.128 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****",
                                "Sunday 01 March 2026  21:39:09 +0000 (0:00:01.007)       0:33:57.136 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********",
                                "Sunday 01 March 2026  21:39:09 +0000 (0:00:00.242)       0:33:57.378 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:39:10 +0000 (0:00:00.534)       0:33:57.913 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:39:10 +0000 (0:00:00.114)       0:33:58.027 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:39:10 +0000 (0:00:00.103)       0:33:58.130 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:39:11 +0000 (0:00:00.777)       0:33:58.908 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:39:11 +0000 (0:00:00.065)       0:33:58.973 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:39:11 +0000 (0:00:00.065)       0:33:59.039 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:39:11 +0000 (0:00:00.072)       0:33:59.112 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:39:42 +0000 (0:00:30.914)       0:34:30.026 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.771)       0:34:30.798 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.069)       0:34:30.867 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.060)       0:34:30.928 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.068)       0:34:30.996 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.074)       0:34:31.070 **********",
                                "ok: [instance] => (item=volumev3)",
                                "ok: [instance] => (item=oslo_db_cell0)",
                                "ok: [instance] => (item=image)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=oslo_db_api)",
                                "ok: [instance] => (item=network)",
                                "ok: [instance] => (item=compute_metadata)",
                                "ok: [instance] => (item=placement)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=compute)",
                                "ok: [instance] => (item=baremetal)",
                                "ok: [instance] => (item=compute_novnc_proxy)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.237)       0:34:31.307 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.090)       0:34:31.398 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:39:43 +0000 (0:00:00.079)       0:34:31.478 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Generate temporary file for SSH public key] ***",
                                "Sunday 01 March 2026  21:39:44 +0000 (0:00:00.593)       0:34:32.071 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Write contents of current private SSH key] ****",
                                "Sunday 01 March 2026  21:39:44 +0000 (0:00:00.207)       0:34:32.279 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Generate public key for SSH private key] ******",
                                "Sunday 01 March 2026  21:39:44 +0000 (0:00:00.439)       0:34:32.718 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Delete temporary file for public SSH key] *****",
                                "Sunday 01 March 2026  21:39:45 +0000 (0:00:00.599)       0:34:33.318 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Remove nova-bootstrap and nova-cell-setup job] ***",
                                "Sunday 01 March 2026  21:39:45 +0000 (0:00:00.215)       0:34:33.533 **********",
                                "ok: [instance] => (item=nova-bootstrap)",
                                "ok: [instance] => (item=nova-cell-setup)",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Deploy Helm chart] ****************************",
                                "Sunday 01 March 2026  21:39:47 +0000 (0:00:01.423)       0:34:34.957 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:42:22 +0000 (0:02:34.791)       0:37:09.748 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:42:22 +0000 (0:00:00.145)       0:37:09.894 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:42:22 +0000 (0:00:00.071)       0:37:09.965 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:42:22 +0000 (0:00:00.063)       0:37:10.029 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress compute] **************************************************",
                                "Sunday 01 March 2026  21:42:22 +0000 (0:00:00.077)       0:37:10.107 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress compute] ********************",
                                "Sunday 01 March 2026  21:42:22 +0000 (0:00:00.106)       0:37:10.213 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:42:23 +0000 (0:00:01.279)       0:37:11.493 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:42:23 +0000 (0:00:00.127)       0:37:11.620 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:42:23 +0000 (0:00:00.071)       0:37:11.691 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:42:24 +0000 (0:00:00.063)       0:37:11.755 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress compute-novnc-proxy] **************************************",
                                "Sunday 01 March 2026  21:42:24 +0000 (0:00:00.097)       0:37:11.853 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress compute-novnc-proxy] ********",
                                "Sunday 01 March 2026  21:42:24 +0000 (0:00:00.096)       0:37:11.950 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Wait until compute api service ready] *********",
                                "Sunday 01 March 2026  21:42:25 +0000 (0:00:01.055)       0:37:13.005 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Create flavors] *******************************",
                                "Sunday 01 March 2026  21:42:25 +0000 (0:00:00.715)       0:37:13.721 **********",
                                "FAILED - RETRYING: [instance]: Create flavors (60 retries left).",
                                "FAILED - RETRYING: [instance]: Create flavors (59 retries left).",
                                "changed: [instance] => (item={'disk': 1, 'name': 'm1.tiny', 'ram': 512, 'vcpus': 1})",
                                "changed: [instance] => (item={'disk': 20, 'name': 'm1.small', 'ram': 2048, 'vcpus': 1})",
                                "changed: [instance] => (item={'disk': 40, 'name': 'm1.medium', 'ram': 4096, 'vcpus': 2})",
                                "changed: [instance] => (item={'disk': 80, 'name': 'm1.large', 'ram': 8192, 'vcpus': 4})",
                                "changed: [instance] => (item={'disk': 160, 'name': 'm1.xlarge', 'ram': 16384, 'vcpus': 8})",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:42:43 +0000 (0:00:17.554)       0:37:31.276 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:42:43 +0000 (0:00:00.124)       0:37:31.400 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:42:43 +0000 (0:00:00.100)       0:37:31.500 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:42:44 +0000 (0:00:00.714)       0:37:32.215 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:42:44 +0000 (0:00:00.061)       0:37:32.276 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:42:44 +0000 (0:00:00.062)       0:37:32.339 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:42:44 +0000 (0:00:00.061)       0:37:32.400 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:43:15 +0000 (0:00:30.907)       0:38:03.308 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:43:16 +0000 (0:00:00.754)       0:38:04.062 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:43:16 +0000 (0:00:00.072)       0:38:04.135 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:43:16 +0000 (0:00:00.081)       0:38:04.217 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:43:16 +0000 (0:00:00.071)       0:38:04.289 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:43:16 +0000 (0:00:00.065)       0:38:04.354 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=network)",
                                "ok: [instance] => (item=dns)",
                                "ok: [instance] => (item=load_balancer)",
                                "ok: [instance] => (item=compute_metadata)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=compute)",
                                "ok: [instance] => (item=baremetal)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:43:16 +0000 (0:00:00.204)       0:38:04.558 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:43:16 +0000 (0:00:00.087)       0:38:04.646 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:43:17 +0000 (0:00:00.096)       0:38:04.743 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Set external_dns_driver] *******************",
                                "Sunday 01 March 2026  21:43:17 +0000 (0:00:00.583)       0:38:05.326 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Generate Helm values] **********************",
                                "Sunday 01 March 2026  21:43:17 +0000 (0:00:00.066)       0:38:05.393 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Append Helm values] ************************",
                                "Sunday 01 March 2026  21:43:17 +0000 (0:00:00.309)       0:38:05.703 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Append Helm values (neutron_policy_server)] ***",
                                "Sunday 01 March 2026  21:43:18 +0000 (0:00:00.081)       0:38:05.785 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  21:43:18 +0000 (0:00:00.082)       0:38:05.867 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:45:22 +0000 (0:02:04.803)       0:40:10.670 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:45:23 +0000 (0:00:00.136)       0:40:10.807 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:45:23 +0000 (0:00:00.070)       0:40:10.877 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:45:23 +0000 (0:00:00.072)       0:40:10.949 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress network] **************************************************",
                                "Sunday 01 March 2026  21:45:23 +0000 (0:00:00.076)       0:40:11.026 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress network] ********************",
                                "Sunday 01 March 2026  21:45:23 +0000 (0:00:00.105)       0:40:11.131 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Wait until network service ready] **********",
                                "Sunday 01 March 2026  21:45:24 +0000 (0:00:01.201)       0:40:12.332 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Create networks] ***************************",
                                "Sunday 01 March 2026  21:45:25 +0000 (0:00:00.800)       0:40:13.132 **********",
                                "FAILED - RETRYING: [instance]: Create networks (60 retries left).",
                                "FAILED - RETRYING: [instance]: Create networks (59 retries left).",
                                "changed: [instance] => (item={'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True, 'subnets': [{'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}]})",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Create subnets] ****************************",
                                "Sunday 01 March 2026  21:45:39 +0000 (0:00:14.575)       0:40:27.707 **********",
                                "changed: [instance] => (item=[{'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True}, {'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}])",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove OpenStack endpoints] *****************",
                                "Sunday 01 March 2026  21:45:42 +0000 (0:00:02.214)       0:40:29.922 **********",
                                "ok: [instance] => (item=public)",
                                "ok: [instance] => (item=admin)",
                                "ok: [instance] => (item=internal)",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove OpenStack service] *******************",
                                "Sunday 01 March 2026  21:45:45 +0000 (0:00:02.852)       0:40:32.774 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove Ingress] *****************************",
                                "Sunday 01 March 2026  21:45:45 +0000 (0:00:00.882)       0:40:33.656 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove Helm chart] **************************",
                                "Sunday 01 March 2026  21:45:46 +0000 (0:00:00.693)       0:40:34.350 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove OpenStack user] **********************",
                                "Sunday 01 March 2026  21:45:47 +0000 (0:00:00.602)       0:40:34.952 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:45:48 +0000 (0:00:01.188)       0:40:36.140 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:45:48 +0000 (0:00:00.123)       0:40:36.264 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:45:48 +0000 (0:00:00.102)       0:40:36.367 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:45:49 +0000 (0:00:00.797)       0:40:37.165 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:45:49 +0000 (0:00:00.069)       0:40:37.234 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:45:49 +0000 (0:00:00.071)       0:40:37.305 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:45:49 +0000 (0:00:00.068)       0:40:37.374 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:46:20 +0000 (0:00:30.946)       0:41:08.320 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.787)       0:41:09.107 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.070)       0:41:09.178 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.087)       0:41:09.265 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.070)       0:41:09.336 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.059)       0:41:09.396 **********",
                                "ok: [instance] => (item=cloudformation)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=orchestration)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.122)       0:41:09.518 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.078)       0:41:09.597 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:46:21 +0000 (0:00:00.086)       0:41:09.683 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.heat : Deploy Helm chart] ****************************",
                                "Sunday 01 March 2026  21:46:22 +0000 (0:00:00.596)       0:41:10.279 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:49:27 +0000 (0:03:04.817)       0:44:15.097 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:49:27 +0000 (0:00:00.130)       0:44:15.227 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:49:27 +0000 (0:00:00.073)       0:44:15.301 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:49:27 +0000 (0:00:00.076)       0:44:15.377 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress orchestration] ********************************************",
                                "Sunday 01 March 2026  21:49:27 +0000 (0:00:00.078)       0:44:15.456 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress orchestration] **************",
                                "Sunday 01 March 2026  21:49:27 +0000 (0:00:00.115)       0:44:15.571 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:49:28 +0000 (0:00:01.082)       0:44:16.654 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:49:29 +0000 (0:00:00.124)       0:44:16.779 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:49:29 +0000 (0:00:00.066)       0:44:16.845 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:49:29 +0000 (0:00:00.072)       0:44:16.917 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress cloudformation] *******************************************",
                                "Sunday 01 March 2026  21:49:29 +0000 (0:00:00.089)       0:44:17.007 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress cloudformation] *************",
                                "Sunday 01 March 2026  21:49:29 +0000 (0:00:00.088)       0:44:17.096 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********",
                                "Sunday 01 March 2026  21:49:30 +0000 (0:00:01.162)       0:44:18.259 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  21:49:30 +0000 (0:00:00.288)       0:44:18.547 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  21:49:32 +0000 (0:00:01.220)       0:44:19.768 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:49:32 +0000 (0:00:00.269)       0:44:20.037 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:49:32 +0000 (0:00:00.106)       0:44:20.143 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:49:32 +0000 (0:00:00.522)       0:44:20.666 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************",
                                "Sunday 01 March 2026  21:49:35 +0000 (0:00:02.072)       0:44:22.738 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***",
                                "Sunday 01 March 2026  21:49:35 +0000 (0:00:00.504)       0:44:23.242 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***",
                                "Sunday 01 March 2026  21:49:36 +0000 (0:00:00.768)       0:44:24.010 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.800)       0:44:24.811 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Generate OpenStack-Helm endpoints] ***************************************",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.406)       0:44:25.218 **********",
                                "included: openstack_helm_endpoints for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.165)       0:44:25.383 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.069)       0:44:25.453 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.058)       0:44:25.512 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.063)       0:44:25.576 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.063)       0:44:25.639 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:49:37 +0000 (0:00:00.055)       0:44:25.695 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:49:38 +0000 (0:00:00.057)       0:44:25.752 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:49:38 +0000 (0:00:00.075)       0:44:25.828 **********",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:49:38 +0000 (0:00:00.089)       0:44:25.917 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************",
                                "Sunday 01 March 2026  21:49:38 +0000 (0:00:00.089)       0:44:26.006 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********",
                                "Sunday 01 March 2026  21:49:38 +0000 (0:00:00.474)       0:44:26.481 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:49:39 +0000 (0:00:00.538)       0:44:27.019 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:49:39 +0000 (0:00:00.109)       0:44:27.128 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:49:39 +0000 (0:00:00.110)       0:44:27.239 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:49:40 +0000 (0:00:00.688)       0:44:27.927 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:49:40 +0000 (0:00:00.069)       0:44:27.997 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:49:40 +0000 (0:00:00.068)       0:44:28.066 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:49:40 +0000 (0:00:00.065)       0:44:28.131 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:50:11 +0000 (0:00:30.915)       0:44:59.047 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.725)       0:44:59.772 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.068)       0:44:59.841 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.084)       0:44:59.925 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.071)       0:44:59.997 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.066)       0:45:00.063 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=network)",
                                "ok: [instance] => (item=load_balancer)",
                                "ok: [instance] => (item=valkey)",
                                "ok: [instance] => (item=oslo_db_persistence)",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.149)       0:45:00.213 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.094)       0:45:00.308 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:50:12 +0000 (0:00:00.101)       0:45:00.409 **********",
                                "changed: [instance]",
                                "",
                                "TASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****",
                                "Sunday 01 March 2026  21:50:13 +0000 (0:00:00.568)       0:45:00.978 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create management network] *****************",
                                "Sunday 01 March 2026  21:50:14 +0000 (0:00:01.365)       0:45:02.343 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create management subnet] ******************",
                                "Sunday 01 March 2026  21:50:16 +0000 (0:00:02.371)       0:45:04.714 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create health manager security group] ******",
                                "Sunday 01 March 2026  21:50:18 +0000 (0:00:01.907)       0:45:06.622 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create health manager security group rules] ***",
                                "Sunday 01 March 2026  21:50:20 +0000 (0:00:01.361)       0:45:07.983 **********",
                                "changed: [instance] => (item={'protocol': 'udp', 'port': 5555})",
                                "changed: [instance] => (item={'protocol': 'udp', 'port': 10514})",
                                "changed: [instance] => (item={'protocol': 'udp', 'port': 20514})",
                                "changed: [instance] => (item={'protocol': 'tcp', 'port': 10514})",
                                "changed: [instance] => (item={'protocol': 'tcp', 'port': 20514})",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create health manager networking ports] ****",
                                "Sunday 01 March 2026  21:50:26 +0000 (0:00:05.891)       0:45:13.875 **********",
                                "changed: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Discover facts for other controllers] ******",
                                "Sunday 01 March 2026  21:50:27 +0000 (0:00:01.817)       0:45:15.692 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Set binding for ports] *********************",
                                "Sunday 01 March 2026  21:50:30 +0000 (0:00:02.575)       0:45:18.267 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Get health manager networking ports] *******",
                                "Sunday 01 March 2026  21:50:34 +0000 (0:00:04.256)       0:45:22.524 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Set controller_ip_port_list] ***************",
                                "Sunday 01 March 2026  21:50:35 +0000 (0:00:01.067)       0:45:23.591 **********",
                                "ok: [instance] => (item=octavia-health-manager-port-instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create amphora security group] *************",
                                "Sunday 01 March 2026  21:50:35 +0000 (0:00:00.095)       0:45:23.687 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create amphora security group rules] *******",
                                "Sunday 01 March 2026  21:50:37 +0000 (0:00:01.098)       0:45:24.785 **********",
                                "changed: [instance] => (item=[22, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 'numa_affinity_policy': None, 'project_id': 'b6357918f73b458f9e03a5667d465a0e', 'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'DOWN', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:50:34Z', 'revision_number': 3, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': None}}, 
'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])",
                                "changed: [instance] => (item=[9443, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 'numa_affinity_policy': None, 'project_id': 'b6357918f73b458f9e03a5667d465a0e', 'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'DOWN', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:50:34Z', 'revision_number': 3, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': 
None}}, 'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create amphora flavor] *********************",
                                "Sunday 01 March 2026  21:50:39 +0000 (0:00:02.268)       0:45:27.054 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Upload Amphora image] ****************************************************",
                                "Sunday 01 March 2026  21:50:40 +0000 (0:00:01.105)       0:45:28.160 **********",
                                "included: glance_image for instance",
                                "",
                                "TASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************",
                                "Sunday 01 March 2026  21:50:40 +0000 (0:00:00.180)       0:45:28.340 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  21:50:41 +0000 (0:00:01.350)       0:45:29.690 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  21:50:43 +0000 (0:00:01.399)       0:45:31.090 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  21:50:43 +0000 (0:00:00.238)       0:45:31.328 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  21:50:59 +0000 (0:00:15.455)       0:45:46.783 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  21:50:59 +0000 (0:00:00.249)       0:45:47.033 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  21:51:01 +0000 (0:00:02.512)       0:45:49.545 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  21:51:02 +0000 (0:00:00.709)       0:45:50.254 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  21:51:31 +0000 (0:00:29.210)       0:46:19.464 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Get Amphora image information] *************",
                                "Sunday 01 March 2026  21:51:32 +0000 (0:00:00.536)       0:46:20.001 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create Amphora SSH key] ********************",
                                "Sunday 01 March 2026  21:51:33 +0000 (0:00:00.926)       0:46:20.927 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Grab generated Amphora public key] *********",
                                "Sunday 01 March 2026  21:51:38 +0000 (0:00:05.746)       0:46:26.673 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Import Amphora SSH key-pair in OpenStack] ***",
                                "Sunday 01 March 2026  21:51:39 +0000 (0:00:00.698)       0:46:27.372 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create CAs & Issuers] **********************",
                                "Sunday 01 March 2026  21:51:41 +0000 (0:00:01.375)       0:46:28.748 **********",
                                "changed: [instance] => (item=octavia-client)",
                                "changed: [instance] => (item=octavia-server)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create certificate for Octavia clients] ****",
                                "Sunday 01 March 2026  21:51:42 +0000 (0:00:01.440)       0:46:30.188 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create admin compute quotaset] *************",
                                "Sunday 01 March 2026  21:51:43 +0000 (0:00:00.699)       0:46:30.888 **********",
                                "[WARNING]: Loadbalancer service is not supported by your cloud. Ignoring",
                                "loadbalancer quotas.",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  21:51:45 +0000 (0:00:01.878)       0:46:32.767 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Add implied roles] *************************",
                                "Sunday 01 March 2026  21:53:50 +0000 (0:02:05.129)       0:48:37.897 **********",
                                "changed: [instance] => (item={'role': 'member', 'implies': 'load-balancer_member'})",
                                "changed: [instance] => (item={'role': 'reader', 'implies': 'load-balancer_observer'})",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:53:57 +0000 (0:00:07.396)       0:48:45.293 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:53:57 +0000 (0:00:00.356)       0:48:45.649 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:53:57 +0000 (0:00:00.070)       0:48:45.719 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:53:58 +0000 (0:00:00.065)       0:48:45.785 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress load-balancer] ********************************************",
                                "Sunday 01 March 2026  21:53:58 +0000 (0:00:00.083)       0:48:45.869 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress load-balancer] **************",
                                "Sunday 01 March 2026  21:53:58 +0000 (0:00:00.099)       0:48:45.968 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:53:59 +0000 (0:00:01.158)       0:48:47.126 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:53:59 +0000 (0:00:00.103)       0:48:47.229 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:53:59 +0000 (0:00:00.106)       0:48:47.336 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:54:00 +0000 (0:00:00.705)       0:48:48.042 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:54:00 +0000 (0:00:00.068)       0:48:48.111 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:54:00 +0000 (0:00:00.067)       0:48:48.178 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:54:00 +0000 (0:00:00.064)       0:48:48.242 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  21:54:31 +0000 (0:00:30.888)       0:49:19.131 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.666)       0:49:19.797 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.073)       0:49:19.871 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.075)       0:49:19.946 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.076)       0:49:20.023 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.062)       0:49:20.086 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=container_infra)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=key_manager)",
                                "ok: [instance] => (item=orchestration)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.153)       0:49:20.239 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.100)       0:49:20.340 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:54:32 +0000 (0:00:00.100)       0:49:20.440 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  21:54:33 +0000 (0:00:00.582)       0:49:21.023 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cluster-api/releases/download/v1.10.5/clusterctl-linux-amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  21:54:33 +0000 (0:00:00.082)       0:49:21.105 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  21:54:34 +0000 (0:00:01.501)       0:49:22.607 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.clusterctl : Create a configuration file] ************",
                                "Sunday 01 March 2026  21:54:34 +0000 (0:00:00.108)       0:49:22.716 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  21:54:35 +0000 (0:00:00.667)       0:49:23.383 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  21:54:35 +0000 (0:00:00.099)       0:49:23.482 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Create build directory] ***",
                                "Sunday 01 March 2026  21:54:36 +0000 (0:00:00.564)       0:49:24.047 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Upload Kustomization] ***",
                                "Sunday 01 March 2026  21:54:36 +0000 (0:00:00.255)       0:49:24.303 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Generate manifests] ***",
                                "Sunday 01 March 2026  21:54:37 +0000 (0:00:00.586)       0:49:24.889 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Apply manifest to cluster] ***",
                                "Sunday 01 March 2026  21:54:38 +0000 (0:00:00.978)       0:49:25.868 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Create a folder for the Cluster API providers] ***",
                                "Sunday 01 March 2026  21:54:41 +0000 (0:00:03.006)       0:49:28.875 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Copy over all provider configuration to the remote system] ***",
                                "Sunday 01 March 2026  21:54:41 +0000 (0:00:00.244)       0:49:29.119 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Get a list of all Cluster API providers] ***",
                                "Sunday 01 March 2026  21:54:43 +0000 (0:00:02.027)       0:49:31.146 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Initialize the management cluster] *****",
                                "Sunday 01 March 2026  21:54:44 +0000 (0:00:00.730)       0:49:31.876 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Parse provider resources into version mapping] ***",
                                "Sunday 01 March 2026  21:54:48 +0000 (0:00:04.371)       0:49:36.248 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Run upgrade if required] ***************",
                                "Sunday 01 March 2026  21:54:48 +0000 (0:00:00.064)       0:49:36.312 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Set node selector for Cluster API components] ***",
                                "Sunday 01 March 2026  21:54:48 +0000 (0:00:00.071)       0:49:36.383 **********",
                                "changed: [instance] => (item={'namespace': 'capi-kubeadm-bootstrap-system', 'name': 'capi-kubeadm-bootstrap-controller-manager'})",
                                "changed: [instance] => (item={'namespace': 'capi-kubeadm-control-plane-system', 'name': 'capi-kubeadm-control-plane-controller-manager'})",
                                "changed: [instance] => (item={'namespace': 'capi-system', 'name': 'capi-controller-manager'})",
                                "changed: [instance] => (item={'namespace': 'capo-system', 'name': 'capo-controller-manager'})",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Set default values for imagePullPolicy in kubeadmConfigSpec of CRDs] ***",
                                "Sunday 01 March 2026  21:55:46 +0000 (0:00:57.897)       0:50:34.281 **********",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Set CAPO instance creation timeout] ****",
                                "Sunday 01 March 2026  21:55:46 +0000 (0:00:00.102)       0:50:34.383 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy Cluster API for Magnum RBAC] *********",
                                "Sunday 01 March 2026  21:56:02 +0000 (0:00:16.140)       0:50:50.524 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  21:56:03 +0000 (0:00:00.756)       0:50:51.280 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy \"magnum-cluster-api-proxy\"] **********",
                                "Sunday 01 March 2026  21:58:11 +0000 (0:02:07.857)       0:52:59.138 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  21:58:12 +0000 (0:00:00.814)       0:52:59.953 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:58:12 +0000 (0:00:00.153)       0:53:00.107 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:58:12 +0000 (0:00:00.087)       0:53:00.194 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:58:12 +0000 (0:00:00.077)       0:53:00.272 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress container-infra] ******************************************",
                                "Sunday 01 March 2026  21:58:12 +0000 (0:00:00.088)       0:53:00.361 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress container-infra] ************",
                                "Sunday 01 March 2026  21:58:12 +0000 (0:00:00.105)       0:53:00.466 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy magnum registry] *********************",
                                "Sunday 01 March 2026  21:58:13 +0000 (0:00:01.200)       0:53:01.667 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create magnum registry Ingress] ******************************************",
                                "Sunday 01 March 2026  21:58:14 +0000 (0:00:00.850)       0:53:02.517 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  21:58:14 +0000 (0:00:00.149)       0:53:02.667 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  21:58:15 +0000 (0:00:00.078)       0:53:02.746 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  21:58:15 +0000 (0:00:00.071)       0:53:02.818 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress container-infra-registry] *********************************",
                                "Sunday 01 March 2026  21:58:15 +0000 (0:00:00.076)       0:53:02.894 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress container-infra-registry] ***",
                                "Sunday 01 March 2026  21:58:15 +0000 (0:00:00.095)       0:53:02.990 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Upload images] ***********************************************************",
                                "Sunday 01 March 2026  21:58:16 +0000 (0:00:01.379)       0:53:04.370 **********",
                                "included: glance_image for instance => (item={'name': 'ubuntu-2204-kube-v1.34.3', 'url': 'https://github.com/vexxhost/capo-image-elements/releases/download/2025.12-3/ubuntu-22.04-v1.34.3.qcow2', 'distro': 'ubuntu'})",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  21:58:16 +0000 (0:00:00.183)       0:53:04.554 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  21:58:17 +0000 (0:00:01.017)       0:53:05.571 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  21:58:18 +0000 (0:00:00.256)       0:53:05.828 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  21:58:28 +0000 (0:00:10.025)       0:53:15.854 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  21:58:28 +0000 (0:00:00.293)       0:53:16.147 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  21:58:28 +0000 (0:00:00.074)       0:53:16.221 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  21:58:29 +0000 (0:00:00.779)       0:53:17.000 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  21:59:04 +0000 (0:00:35.316)       0:53:52.317 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  21:59:05 +0000 (0:00:00.485)       0:53:52.802 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  21:59:05 +0000 (0:00:00.102)       0:53:52.904 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  21:59:05 +0000 (0:00:00.129)       0:53:53.033 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  21:59:05 +0000 (0:00:00.692)       0:53:53.726 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  21:59:06 +0000 (0:00:00.068)       0:53:53.795 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  21:59:06 +0000 (0:00:00.065)       0:53:53.861 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  21:59:06 +0000 (0:00:00.070)       0:53:53.931 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:00:01 +0000 (0:00:54.979)       0:54:48.910 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:00:01 +0000 (0:00:00.688)       0:54:49.598 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:00:01 +0000 (0:00:00.068)       0:54:49.667 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:00:02 +0000 (0:00:00.071)       0:54:49.738 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:00:02 +0000 (0:00:00.080)       0:54:49.819 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:00:02 +0000 (0:00:00.086)       0:54:49.905 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=sharev2)",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:00:02 +0000 (0:00:00.127)       0:54:50.033 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:00:02 +0000 (0:00:00.106)       0:54:50.139 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:00:02 +0000 (0:00:00.098)       0:54:50.237 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create flavor] ******************************",
                                "Sunday 01 March 2026  22:00:03 +0000 (0:00:00.568)       0:54:50.805 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Upload service image] ****************************************************",
                                "Sunday 01 March 2026  22:00:06 +0000 (0:00:03.064)       0:54:53.870 **********",
                                "included: glance_image for instance",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  22:00:06 +0000 (0:00:00.351)       0:54:54.222 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  22:00:07 +0000 (0:00:00.960)       0:54:55.183 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  22:00:07 +0000 (0:00:00.248)       0:54:55.431 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  22:00:17 +0000 (0:00:09.464)       0:55:04.896 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  22:00:17 +0000 (0:00:00.557)       0:55:05.454 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  22:00:17 +0000 (0:00:00.073)       0:55:05.527 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  22:00:18 +0000 (0:00:00.754)       0:55:06.282 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  22:00:35 +0000 (0:00:16.593)       0:55:22.875 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create generic share driver security group] ***",
                                "Sunday 01 March 2026  22:00:35 +0000 (0:00:00.393)       0:55:23.268 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create generic share driver security group tcp rules] ***",
                                "Sunday 01 March 2026  22:00:37 +0000 (0:00:01.573)       0:55:24.841 **********",
                                "changed: [instance] => (item=22)",
                                "changed: [instance] => (item=111)",
                                "changed: [instance] => (item=2049)",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create generic share driver security group icmp rules] ***",
                                "Sunday 01 March 2026  22:00:40 +0000 (0:00:03.881)       0:55:28.722 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Generate temporary file for SSH public key] ***",
                                "Sunday 01 March 2026  22:00:42 +0000 (0:00:01.307)       0:55:30.030 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Write contents of current private SSH key] ***",
                                "Sunday 01 March 2026  22:00:42 +0000 (0:00:00.207)       0:55:30.238 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Generate public key for SSH private key] ****",
                                "Sunday 01 March 2026  22:00:42 +0000 (0:00:00.416)       0:55:30.654 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Delete temporary file for public SSH key] ***",
                                "Sunday 01 March 2026  22:00:43 +0000 (0:00:00.356)       0:55:31.010 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create secret with the SSH keys] ************",
                                "Sunday 01 March 2026  22:00:43 +0000 (0:00:00.214)       0:55:31.225 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:00:44 +0000 (0:00:00.690)       0:55:31.916 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:03:09 +0000 (0:02:25.473)       0:57:57.389 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:03:09 +0000 (0:00:00.172)       0:57:57.562 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:03:09 +0000 (0:00:00.071)       0:57:57.634 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:03:09 +0000 (0:00:00.074)       0:57:57.709 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress sharev2] **************************************************",
                                "Sunday 01 March 2026  22:03:10 +0000 (0:00:00.094)       0:57:57.803 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress sharev2] ********************",
                                "Sunday 01 March 2026  22:03:10 +0000 (0:00:00.112)       0:57:57.916 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Update service tenant quotas] ***************",
                                "Sunday 01 March 2026  22:03:11 +0000 (0:00:01.268)       0:57:59.184 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:03:13 +0000 (0:00:02.170)       0:58:01.355 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:03:13 +0000 (0:00:00.102)       0:58:01.457 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:03:13 +0000 (0:00:00.069)       0:58:01.527 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:03:13 +0000 (0:00:00.070)       0:58:01.597 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:03:13 +0000 (0:00:00.070)       0:58:01.668 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:03:14 +0000 (0:00:00.080)       0:58:01.748 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:03:14 +0000 (0:00:00.077)       0:58:01.825 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:03:14 +0000 (0:00:00.248)       0:58:02.074 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=dashboard)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:03:14 +0000 (0:00:00.113)       0:58:02.187 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:03:14 +0000 (0:00:00.098)       0:58:02.285 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:03:14 +0000 (0:00:00.093)       0:58:02.379 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.horizon : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  22:03:15 +0000 (0:00:00.610)       0:58:02.989 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:03:46 +0000 (0:00:31.195)       0:58:34.184 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:03:46 +0000 (0:00:00.163)       0:58:34.348 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:03:46 +0000 (0:00:00.074)       0:58:34.422 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:03:46 +0000 (0:00:00.072)       0:58:34.494 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress dashboard] ************************************************",
                                "Sunday 01 March 2026  22:03:46 +0000 (0:00:00.098)       0:58:34.593 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress dashboard] ******************",
                                "Sunday 01 March 2026  22:03:46 +0000 (0:00:00.123)       0:58:34.716 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************",
                                "Sunday 01 March 2026  22:03:48 +0000 (0:00:01.214)       0:58:35.930 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Fetch Neutron DB secret] ********",
                                "Sunday 01 March 2026  22:03:49 +0000 (0:00:00.863)       0:58:36.794 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Fetch Nova DB secret] ***********",
                                "Sunday 01 March 2026  22:03:49 +0000 (0:00:00.723)       0:58:37.517 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Fetch Octavia DB secret] ********",
                                "Sunday 01 March 2026  22:03:50 +0000 (0:00:00.707)       0:58:38.225 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Create \"openstack-database-exporter-dsn\" secret] ***",
                                "Sunday 01 March 2026  22:03:51 +0000 (0:00:00.705)       0:58:38.931 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************",
                                "Sunday 01 March 2026  22:03:51 +0000 (0:00:00.779)       0:58:39.711 **********",
                                "changed: [instance]",
                                "",
                                "PLAY [controllers] *************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:03:52 +0000 (0:00:00.811)       0:58:40.523 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********",
                                "Sunday 01 March 2026  22:03:57 +0000 (0:00:04.407)       0:58:44.930 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:03:57 +0000 (0:00:00.481)       0:58:45.412 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:03:59 +0000 (0:00:01.341)       0:58:46.754 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:03:59 +0000 (0:00:00.316)       0:58:47.070 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:03:59 +0000 (0:00:00.120)       0:58:47.191 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:03:59 +0000 (0:00:00.400)       0:58:47.592 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************",
                                "Sunday 01 March 2026  22:04:01 +0000 (0:00:01.146)       0:58:48.738 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***",
                                "Sunday 01 March 2026  22:04:01 +0000 (0:00:00.518)       0:58:49.257 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***",
                                "Sunday 01 March 2026  22:04:02 +0000 (0:00:00.780)       0:58:50.038 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***",
                                "Sunday 01 March 2026  22:04:03 +0000 (0:00:00.922)       0:58:50.960 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Generate OpenStack-Helm endpoints] ***************************************",
                                "Sunday 01 March 2026  22:04:03 +0000 (0:00:00.417)       0:58:51.377 **********",
                                "included: openstack_helm_endpoints for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:04:03 +0000 (0:00:00.198)       0:58:51.576 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:04:03 +0000 (0:00:00.083)       0:58:51.659 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.072)       0:58:51.732 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.070)       0:58:51.802 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.072)       0:58:51.874 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.069)       0:58:51.944 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.074)       0:58:52.019 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.074)       0:58:52.094 **********",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.093)       0:58:52.187 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************",
                                "Sunday 01 March 2026  22:04:04 +0000 (0:00:00.111)       0:58:52.299 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********",
                                "Sunday 01 March 2026  22:04:05 +0000 (0:00:00.521)       0:58:52.820 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Configure networking] ****************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:04:05 +0000 (0:00:00.581)       0:58:53.402 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Add IP address to \"br-ex\"] ***********************************************",
                                "Sunday 01 March 2026  22:04:10 +0000 (0:00:04.423)       0:58:57.825 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Set \"br-ex\" interface to \"up\"] *******************************************",
                                "Sunday 01 March 2026  22:04:10 +0000 (0:00:00.255)       0:58:58.081 **********",
                                "ok: [instance]",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=785  changed=282  unreachable=0    failed=0    skipped=252  rescued=0    ignored=2",
                                "",
                                "Sunday 01 March 2026  22:04:10 +0000 (0:00:00.218)       0:58:58.299 **********",
                                "===============================================================================",
                                "vexxhost.atmosphere.cinder : Deploy Helm chart ------------------------ 282.76s",
                                "vexxhost.atmosphere.heat : Deploy Helm chart -------------------------- 184.82s",
                                "vexxhost.atmosphere.keystone : Deploy Helm chart ---------------------- 159.02s",
                                "vexxhost.atmosphere.nova : Deploy Helm chart -------------------------- 154.79s",
                                "vexxhost.atmosphere.keycloak : Deploy Helm chart ---------------------- 153.36s",
                                "vexxhost.atmosphere.manila : Deploy Helm chart ------------------------ 145.47s",
                                "vexxhost.atmosphere.magnum : Deploy Helm chart ------------------------ 127.86s",
                                "vexxhost.atmosphere.glance : Deploy Helm chart ------------------------ 126.72s",
                                "vexxhost.atmosphere.octavia : Deploy Helm chart ----------------------- 125.13s",
                                "vexxhost.atmosphere.neutron : Deploy Helm chart ----------------------- 124.80s",
                                "vexxhost.ceph.mon : Run Bootstrap coomand ----------------------------- 123.51s",
                                "vexxhost.atmosphere.percona_xtradb_cluster : Apply Percona XtraDB cluster -- 91.58s",
                                "vexxhost.atmosphere.barbican : Deploy Helm chart ----------------------- 91.04s",
                                "vexxhost.ceph.osd : Install OSDs --------------------------------------- 87.99s",
                                "vexxhost.atmosphere.placement : Deploy Helm chart ---------------------- 78.41s",
                                "vexxhost.atmosphere.rabbitmq : Deploy cluster -------------------------- 60.98s",
                                "vexxhost.kubernetes.cluster_api : Set node selector for Cluster API components -- 57.90s",
                                "vexxhost.atmosphere.rabbitmq : Deploy cluster -------------------------- 54.98s",
                                "vexxhost.atmosphere.glance_image : Upload image into Glance ------------ 35.32s",
                                "vexxhost.atmosphere.horizon : Deploy Helm chart ------------------------ 31.20s",
                                "INFO     [aio > converge] Executed: Successful",
                                "INFO     [aio > idempotence] Executing",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:04:13 +0000 (0:00:00.017)       0:00:00.017 **********",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Sunday 01 March 2026  22:04:18 +0000 (0:00:05.001)       0:00:05.018 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph monitors & managers] *****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:04:18 +0000 (0:00:00.230)       0:00:05.249 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:04:22 +0000 (0:00:03.964)       0:00:09.214 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:04:23 +0000 (0:00:00.445)       0:00:09.659 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:04:23 +0000 (0:00:00.189)       0:00:09.848 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:04:23 +0000 (0:00:00.401)       0:00:10.250 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:04:24 +0000 (0:00:00.110)       0:00:10.360 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:04:24 +0000 (0:00:00.473)       0:00:10.834 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:04:24 +0000 (0:00:00.144)       0:00:10.979 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:04:24 +0000 (0:00:00.146)       0:00:11.126 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:04:25 +0000 (0:00:00.283)       0:00:11.409 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:04:26 +0000 (0:00:01.472)       0:00:12.882 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:04:26 +0000 (0:00:00.137)       0:00:13.019 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:04:27 +0000 (0:00:00.530)       0:00:13.550 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  22:04:29 +0000 (0:00:02.584)       0:00:16.134 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  22:04:29 +0000 (0:00:00.079)       0:00:16.213 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  22:04:29 +0000 (0:00:00.079)       0:00:16.292 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  22:04:30 +0000 (0:00:00.085)       0:00:16.378 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  22:04:31 +0000 (0:00:01.257)       0:00:17.635 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  22:04:31 +0000 (0:00:00.498)       0:00:18.134 **********",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  22:04:32 +0000 (0:00:00.962)       0:00:19.096 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  22:04:33 +0000 (0:00:00.727)       0:00:19.824 **********",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  22:04:33 +0000 (0:00:00.036)       0:00:19.860 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:04:34 +0000 (0:00:00.725)       0:00:20.586 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:04:34 +0000 (0:00:00.299)       0:00:20.885 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:04:34 +0000 (0:00:00.114)       0:00:21.000 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:04:35 +0000 (0:00:00.542)       0:00:21.543 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Sunday 01 March 2026  22:04:38 +0000 (0:00:03.268)       0:00:24.811 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Sunday 01 March 2026  22:04:39 +0000 (0:00:01.237)       0:00:26.049 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Sunday 01 March 2026  22:04:40 +0000 (0:00:00.315)       0:00:26.365 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Sunday 01 March 2026  22:04:40 +0000 (0:00:00.490)       0:00:26.855 **********",
                                "ok: [instance] => (item={'path': '/etc/docker'})",
                                "ok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "ok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Sunday 01 March 2026  22:04:41 +0000 (0:00:00.618)       0:00:27.474 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Sunday 01 March 2026  22:04:41 +0000 (0:00:00.494)       0:00:27.969 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Sunday 01 March 2026  22:04:42 +0000 (0:00:00.470)       0:00:28.440 **********",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Sunday 01 March 2026  22:04:42 +0000 (0:00:00.044)       0:00:28.485 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Sunday 01 March 2026  22:04:42 +0000 (0:00:00.460)       0:00:28.946 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Sunday 01 March 2026  22:04:42 +0000 (0:00:00.142)       0:00:29.088 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Sunday 01 March 2026  22:04:43 +0000 (0:00:01.234)       0:00:30.323 **********",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Sunday 01 March 2026  22:04:44 +0000 (0:00:00.766)       0:00:31.090 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Sunday 01 March 2026  22:04:45 +0000 (0:00:00.367)       0:00:31.458 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Sunday 01 March 2026  22:04:45 +0000 (0:00:00.297)       0:00:31.756 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Sunday 01 March 2026  22:04:45 +0000 (0:00:00.528)       0:00:32.284 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Get `cephadm ls` status] *****************************",
                                "Sunday 01 March 2026  22:04:46 +0000 (0:00:00.644)       0:00:32.928 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Parse the `cephadm ls` output] ***********************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:05.700)       0:00:38.629 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Assimilate existing configs in `ceph.conf`] **********",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.106)       0:00:38.735 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt monitor to cluster] ****************************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.086)       0:00:38.821 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Adopt manager to cluster] ****************************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.074)       0:00:38.896 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Enable \"cephadm\" mgr module] *************************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.073)       0:00:38.970 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set orchestrator backend to \"cephadm\"] ***************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.083)       0:00:39.053 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Use `cephadm` user for cephadm] **********************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.083)       0:00:39.136 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Generate \"cephadm\" key] ******************************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.076)       0:00:39.213 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Set Ceph Monitor IP address] *************************",
                                "Sunday 01 March 2026  22:04:52 +0000 (0:00:00.081)       0:00:39.294 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Check if any node is bootstrapped] *******************",
                                "Sunday 01 March 2026  22:04:53 +0000 (0:00:00.120)       0:00:39.415 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.mon : Select pre-existing bootstrap node if exists] ********",
                                "Sunday 01 March 2026  22:04:53 +0000 (0:00:00.339)       0:00:39.754 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Bootstrap cluster] ***********************************",
                                "Sunday 01 March 2026  22:04:53 +0000 (0:00:00.114)       0:00:39.868 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Sunday 01 March 2026  22:04:53 +0000 (0:00:00.083)       0:00:39.952 **********",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Sunday 01 March 2026  22:04:53 +0000 (0:00:00.148)       0:00:40.100 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Sunday 01 March 2026  22:04:55 +0000 (0:00:02.200)       0:00:42.300 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Sunday 01 March 2026  22:04:56 +0000 (0:00:00.142)       0:00:42.443 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Sunday 01 March 2026  22:04:56 +0000 (0:00:00.522)       0:00:42.965 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Configure \"mon\" label for monitors] ******************",
                                "Sunday 01 March 2026  22:04:59 +0000 (0:00:02.596)       0:00:45.561 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mon : Validate monitor exist] ******************************",
                                "Sunday 01 March 2026  22:05:01 +0000 (0:00:02.291)       0:00:47.853 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Sunday 01 March 2026  22:05:12 +0000 (0:00:10.866)       0:00:58.719 **********",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Sunday 01 March 2026  22:05:12 +0000 (0:00:00.148)       0:00:58.868 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Sunday 01 March 2026  22:05:12 +0000 (0:00:00.124)       0:00:58.993 **********",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Sunday 01 March 2026  22:05:12 +0000 (0:00:00.134)       0:00:59.127 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Sunday 01 March 2026  22:05:13 +0000 (0:00:00.303)       0:00:59.430 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Configure \"mgr\" label for managers] ******************",
                                "Sunday 01 March 2026  22:05:15 +0000 (0:00:02.882)       0:01:02.313 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Validate manager exist] ******************************",
                                "Sunday 01 March 2026  22:05:18 +0000 (0:00:02.202)       0:01:04.515 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.mgr : Enable the Ceph Manager prometheus module] ***********",
                                "Sunday 01 March 2026  22:05:20 +0000 (0:00:02.171)       0:01:06.687 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Ceph OSDs] ********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:05:23 +0000 (0:00:03.060)       0:01:09.747 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:05:27 +0000 (0:00:04.097)       0:01:13.845 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:05:27 +0000 (0:00:00.299)       0:01:14.144 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:05:27 +0000 (0:00:00.110)       0:01:14.255 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:05:28 +0000 (0:00:00.294)       0:01:14.550 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:05:28 +0000 (0:00:00.115)       0:01:14.666 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:05:28 +0000 (0:00:00.413)       0:01:15.079 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:05:28 +0000 (0:00:00.150)       0:01:15.230 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:05:29 +0000 (0:00:00.298)       0:01:15.528 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:05:29 +0000 (0:00:00.294)       0:01:15.822 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:05:30 +0000 (0:00:01.486)       0:01:17.308 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:05:31 +0000 (0:00:00.152)       0:01:17.460 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:05:31 +0000 (0:00:00.495)       0:01:17.956 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  22:05:33 +0000 (0:00:02.138)       0:01:20.094 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  22:05:33 +0000 (0:00:00.089)       0:01:20.184 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  22:05:33 +0000 (0:00:00.090)       0:01:20.275 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  22:05:34 +0000 (0:00:00.087)       0:01:20.362 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  22:05:35 +0000 (0:00:01.208)       0:01:21.571 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  22:05:35 +0000 (0:00:00.554)       0:01:22.125 **********",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  22:05:36 +0000 (0:00:01.012)       0:01:23.137 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  22:05:37 +0000 (0:00:00.616)       0:01:23.754 **********",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  22:05:37 +0000 (0:00:00.035)       0:01:23.789 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:05:37 +0000 (0:00:00.501)       0:01:24.291 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:05:38 +0000 (0:00:00.289)       0:01:24.580 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://download.docker.com/linux/static/stable/x86_64/docker-24.0.9.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:05:38 +0000 (0:00:00.110)       0:01:24.691 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:05:38 +0000 (0:00:00.449)       0:01:25.141 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Install AppArmor packages] ******************",
                                "Sunday 01 March 2026  22:05:42 +0000 (0:00:03.355)       0:01:28.496 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Ensure group \"docker\" exists] ***************",
                                "Sunday 01 March 2026  22:05:43 +0000 (0:00:01.502)       0:01:29.999 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd service file for docker] *****",
                                "Sunday 01 March 2026  22:05:43 +0000 (0:00:00.241)       0:01:30.240 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create folders for configuration] ***********",
                                "Sunday 01 March 2026  22:05:44 +0000 (0:00:00.509)       0:01:30.749 **********",
                                "ok: [instance] => (item={'path': '/etc/docker'})",
                                "ok: [instance] => (item={'path': '/var/lib/docker', 'mode': '0o710'})",
                                "ok: [instance] => (item={'path': '/run/docker', 'mode': '0o711'})",
                                "",
                                "TASK [vexxhost.containers.docker : Create systemd socket file for docker] ******",
                                "Sunday 01 March 2026  22:05:45 +0000 (0:00:00.658)       0:01:31.408 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Create docker daemon config file] ***********",
                                "Sunday 01 March 2026  22:05:45 +0000 (0:00:00.510)       0:01:31.919 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.docker : Force any restarts if necessary] ************",
                                "Sunday 01 March 2026  22:05:46 +0000 (0:00:00.519)       0:01:32.438 **********",
                                "",
                                "TASK [vexxhost.containers.docker : Enable and start service] *******************",
                                "Sunday 01 March 2026  22:05:46 +0000 (0:00:00.044)       0:01:32.483 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Gather variables for each operating system] ******",
                                "Sunday 01 March 2026  22:05:46 +0000 (0:00:00.489)       0:01:32.972 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/cephadm/vars/ubuntu-22.04.yml)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Install packages] ********************************",
                                "Sunday 01 March 2026  22:05:46 +0000 (0:00:00.147)       0:01:33.120 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure services are started] *********************",
                                "Sunday 01 March 2026  22:05:48 +0000 (0:00:01.253)       0:01:34.373 **********",
                                "ok: [instance] => (item=chronyd)",
                                "ok: [instance] => (item=sshd)",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Download \"cephadm\"] ******************************",
                                "Sunday 01 March 2026  22:05:48 +0000 (0:00:00.800)       0:01:35.174 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Remove cephadm from old path] ********************",
                                "Sunday 01 March 2026  22:05:49 +0000 (0:00:00.355)       0:01:35.530 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Ensure \"cephadm\" user is present] ****************",
                                "Sunday 01 March 2026  22:05:49 +0000 (0:00:00.244)       0:01:35.775 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm : Allow \"cephadm\" user to have passwordless sudo] ***",
                                "Sunday 01 March 2026  22:05:49 +0000 (0:00:00.295)       0:01:36.070 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get monitor status] **********************************",
                                "Sunday 01 March 2026  22:05:50 +0000 (0:00:00.282)       0:01:36.353 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.ceph.osd : Select admin host] ***********************************",
                                "Sunday 01 March 2026  22:05:50 +0000 (0:00:00.328)       0:01:36.681 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `cephadm ls` status] *****************************",
                                "Sunday 01 March 2026  22:05:50 +0000 (0:00:00.093)       0:01:36.775 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Parse the `cephadm ls` output] ***********************",
                                "Sunday 01 March 2026  22:05:56 +0000 (0:00:05.570)       0:01:42.345 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install Ceph host] *******************************************************",
                                "Sunday 01 March 2026  22:05:56 +0000 (0:00:00.114)       0:01:42.460 **********",
                                "included: vexxhost.ceph.cephadm_host for instance",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Get public SSH key for \"cephadm\" user] ******",
                                "Sunday 01 March 2026  22:05:56 +0000 (0:00:00.135)       0:01:42.595 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set fact with public SSH key for \"cephadm\" user] ***",
                                "Sunday 01 March 2026  22:05:56 +0000 (0:00:00.125)       0:01:42.721 **********",
                                "skipping: [instance] => (item=instance)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Set authorized key for \"cephadm\"] ***********",
                                "Sunday 01 March 2026  22:05:56 +0000 (0:00:00.117)       0:01:42.839 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.cephadm_host : Add new host to Ceph] ***********************",
                                "Sunday 01 March 2026  22:05:56 +0000 (0:00:00.330)       0:01:43.169 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Adopt OSDs to cluster] *******************************",
                                "Sunday 01 March 2026  22:05:59 +0000 (0:00:02.771)       0:01:45.940 **********",
                                "skipping: [instance] => (item=osd.0)",
                                "skipping: [instance] => (item=osd.1)",
                                "skipping: [instance] => (item=osd.2)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait until OSD added to cephadm] *********************",
                                "Sunday 01 March 2026  22:06:09 +0000 (0:00:10.094)       0:01:56.034 **********",
                                "skipping: [instance] => (item=osd.0)",
                                "skipping: [instance] => (item=osd.1)",
                                "skipping: [instance] => (item=osd.2)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Ensure all OSDs are non-legacy] **********************",
                                "Sunday 01 March 2026  22:06:09 +0000 (0:00:00.198)       0:01:56.233 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph-volume lvm list` status] *******************",
                                "Sunday 01 March 2026  22:06:15 +0000 (0:00:05.501)       0:02:01.734 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Install OSDs] ****************************************",
                                "Sunday 01 March 2026  22:06:26 +0000 (0:00:10.974)       0:02:12.709 **********",
                                "skipping: [instance] => (item=/dev/ceph-instance-osd0/data)",
                                "skipping: [instance] => (item=/dev/ceph-instance-osd1/data)",
                                "skipping: [instance] => (item=/dev/ceph-instance-osd2/data)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get mon dump] ****************************************",
                                "Sunday 01 March 2026  22:06:26 +0000 (0:00:00.209)       0:02:12.919 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Mark require osd release] ****************************",
                                "Sunday 01 March 2026  22:06:28 +0000 (0:00:02.177)       0:02:15.097 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Wait for all OSD to be running] **********************",
                                "Sunday 01 March 2026  22:06:30 +0000 (0:00:02.155)       0:02:17.252 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/ceph/roles/osd/tasks/check-osds.yml for instance",
                                "",
                                "TASK [vexxhost.ceph.osd : Set the retry count] *********************************",
                                "Sunday 01 March 2026  22:06:31 +0000 (0:00:00.134)       0:02:17.387 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Get `ceph orch ps`] **********************************",
                                "Sunday 01 March 2026  22:06:31 +0000 (0:00:00.093)       0:02:17.480 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : OSD daemon list] *************************************",
                                "Sunday 01 March 2026  22:06:33 +0000 (0:00:02.147)       0:02:19.628 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any OSD not running] *************************",
                                "Sunday 01 March 2026  22:06:33 +0000 (0:00:00.088)       0:02:19.716 **********",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance] => (item=1)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.ceph.osd : Fail if any duplicate OSD ID] ************************",
                                "Sunday 01 March 2026  22:06:33 +0000 (0:00:00.105)       0:02:19.822 **********",
                                "skipping: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Ensure RBD kernel module is loaded] **************************************",
                                "Sunday 01 March 2026  22:06:33 +0000 (0:00:00.115)       0:02:19.937 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:06:34 +0000 (0:00:00.503)       0:02:20.441 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.sysctl : Configure sysctl values] ********************",
                                "Sunday 01 March 2026  22:06:38 +0000 (0:00:04.394)       0:02:24.836 **********",
                                "ok: [instance] => (item={'name': 'fs.aio-max-nr', 'value': 1048576})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_timestamps', 'value': 0})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_sack', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.core.netdev_budget', 'value': 1000})",
                                "ok: [instance] => (item={'name': 'net.core.netdev_max_backlog', 'value': 250000})",
                                "ok: [instance] => (item={'name': 'net.core.rmem_max', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.wmem_max', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.rmem_default', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.wmem_default', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.core.optmem_max', 'value': 4194304})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_rmem', 'value': '4096 87380 4194304'})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_wmem', 'value': '4096 65536 4194304'})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_low_latency', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.ipv4.tcp_adv_win_scale', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh1', 'value': 128})",
                                "ok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh2', 'value': 28872})",
                                "ok: [instance] => (item={'name': 'net.ipv4.neigh.default.gc_thresh3', 'value': 32768})",
                                "ok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh1', 'value': 128})",
                                "ok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh2', 'value': 28872})",
                                "ok: [instance] => (item={'name': 'net.ipv6.neigh.default.gc_thresh3', 'value': 32768})",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Create folder for persistent configuration] ***",
                                "Sunday 01 March 2026  22:06:42 +0000 (0:00:03.967)       0:02:28.803 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Install persistent \"ethtool\" tuning] *******",
                                "Sunday 01 March 2026  22:06:42 +0000 (0:00:00.308)       0:02:29.111 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ethtool : Run \"ethtool\" tuning] **********************",
                                "Sunday 01 March 2026  22:06:43 +0000 (0:00:00.499)       0:02:29.611 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Set a fact with the \"atmosphere_images\" for other plays] *****************",
                                "Sunday 01 March 2026  22:06:43 +0000 (0:00:00.368)       0:02:29.979 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Configure Kubernetes VIP] ************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:06:43 +0000 (0:00:00.099)       0:02:30.078 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/kubernetes/manifests)] ***",
                                "Sunday 01 March 2026  22:06:47 +0000 (0:00:03.966)       0:02:34.045 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Uninstall legacy HA stack] ****************",
                                "Sunday 01 March 2026  22:06:47 +0000 (0:00:00.287)       0:02:34.332 **********",
                                "ok: [instance] => (item=/etc/keepalived/keepalived.conf)",
                                "ok: [instance] => (item=/etc/keepalived/check_apiserver.sh)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/keepalived.yaml)",
                                "ok: [instance] => (item=/etc/haproxy/haproxy.cfg)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/haproxy.yaml)",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Switch API server to run on port 6443] ****",
                                "Sunday 01 March 2026  22:06:48 +0000 (0:00:00.988)       0:02:35.320 **********",
                                "ok: [instance] => (item=/etc/kubernetes/manifests/kube-apiserver.yaml)",
                                "ok: [instance] => (item=/etc/kubernetes/controller-manager.conf)",
                                "ok: [instance] => (item=/etc/kubernetes/scheduler.conf)",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if super-admin.conf exists] *********",
                                "Sunday 01 March 2026  22:06:49 +0000 (0:00:00.601)       0:02:35.922 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Check if kubeadm has already run] *********",
                                "Sunday 01 March 2026  22:06:49 +0000 (0:00:00.232)       0:02:36.155 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path] ************",
                                "Sunday 01 March 2026  22:06:50 +0000 (0:00:00.224)       0:02:36.379 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Set fact with KUBECONFIG path (with super-admin.conf)] ***",
                                "Sunday 01 March 2026  22:06:50 +0000 (0:00:00.176)       0:02:36.556 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Upload Kubernetes manifest] ***************",
                                "Sunday 01 March 2026  22:06:50 +0000 (0:00:00.074)       0:02:36.631 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Ensure kube-vip configuration file] *******",
                                "Sunday 01 March 2026  22:06:50 +0000 (0:00:00.582)       0:02:37.213 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kube_vip : Flush handlers] ***************************",
                                "Sunday 01 March 2026  22:06:51 +0000 (0:00:00.256)       0:02:37.470 **********",
                                "",
                                "PLAY [Install Kubernetes] ******************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:06:51 +0000 (0:00:00.114)       0:02:37.584 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:06:54 +0000 (0:00:03.733)       0:02:41.318 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:06:55 +0000 (0:00:00.290)       0:02:41.608 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:06:55 +0000 (0:00:00.111)       0:02:41.720 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:06:55 +0000 (0:00:00.295)       0:02:42.015 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/opencontainers/runc/releases/download/v1.4.0/runc.amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:06:55 +0000 (0:00:00.103)       0:02:42.119 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:06:56 +0000 (0:00:00.421)       0:02:42.540 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:06:56 +0000 (0:00:00.326)       0:02:42.866 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:06:56 +0000 (0:00:00.318)       0:02:43.184 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:06:58 +0000 (0:00:01.314)       0:02:44.499 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/containerd/releases/download/v2.2.0/containerd-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:06:58 +0000 (0:00:00.123)       0:02:44.622 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:06:58 +0000 (0:00:00.444)       0:02:45.067 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  22:07:00 +0000 (0:00:02.107)       0:02:47.175 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  22:07:00 +0000 (0:00:00.087)       0:02:47.262 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  22:07:01 +0000 (0:00:00.086)       0:02:47.349 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  22:07:01 +0000 (0:00:00.086)       0:02:47.436 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  22:07:02 +0000 (0:00:01.332)       0:02:48.768 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  22:07:02 +0000 (0:00:00.519)       0:02:49.288 **********",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  22:07:03 +0000 (0:00:00.992)       0:02:50.280 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  22:07:04 +0000 (0:00:00.601)       0:02:50.882 **********",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  22:07:04 +0000 (0:00:00.041)       0:02:50.923 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the \"kubeadm-config\" ConfigMap] ***",
                                "Sunday 01 March 2026  22:07:05 +0000 (0:00:00.471)       0:02:51.395 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Parse the ClusterConfiguration] ***",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.988)       0:02:52.383 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Retrieve the current Kubernetes version] ***",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.084)       0:02:52.468 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Extract major, minor, and patch versions] ***",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.078)       0:02:52.547 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Fail if we're jumping more than one minor version] ***",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.081)       0:02:52.629 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes_upgrade_check : Set fact if we need to upgrade] ***",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.074)       0:02:52.704 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.119)       0:02:52.823 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.384)       0:02:53.208 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubeadm\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:06 +0000 (0:00:00.100)       0:02:53.309 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:07 +0000 (0:00:00.385)       0:02:53.694 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:07:07 +0000 (0:00:00.134)       0:02:53.829 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:07 +0000 (0:00:00.284)       0:02:54.114 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubectl\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:07 +0000 (0:00:00.104)       0:02:54.218 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:08 +0000 (0:00:00.429)       0:02:54.648 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install SELinux packages] ***************",
                                "Sunday 01 March 2026  22:07:08 +0000 (0:00:00.178)       0:02:54.826 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Set SELinux to permissive at runtime] ***",
                                "Sunday 01 March 2026  22:07:08 +0000 (0:00:00.088)       0:02:54.915 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Persist SELinux permissive mode] ********",
                                "Sunday 01 March 2026  22:07:08 +0000 (0:00:00.087)       0:02:55.003 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Install AppArmor packages] **************",
                                "Sunday 01 March 2026  22:07:08 +0000 (0:00:00.088)       0:02:55.091 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create systemd service file for containerd] ***",
                                "Sunday 01 March 2026  22:07:10 +0000 (0:00:01.429)       0:02:56.521 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Create folders for configuration] *******",
                                "Sunday 01 March 2026  22:07:10 +0000 (0:00:00.528)       0:02:57.050 **********",
                                "ok: [instance] => (item={'path': '/etc/containerd'})",
                                "ok: [instance] => (item={'path': '/var/lib/containerd', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd', 'mode': '0o711'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.grpc.v1.cri', 'mode': '0o700'})",
                                "ok: [instance] => (item={'path': '/run/containerd/io.containerd.sandbox.controller.v1.shim', 'mode': '0o700'})",
                                "",
                                "TASK [vexxhost.containers.containerd : Create containerd config file] **********",
                                "Sunday 01 March 2026  22:07:11 +0000 (0:00:01.009)       0:02:58.059 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.containerd : Force any restarts if necessary] ********",
                                "Sunday 01 March 2026  22:07:12 +0000 (0:00:00.580)       0:02:58.640 **********",
                                "",
                                "TASK [vexxhost.containers.containerd : Enable and start service] ***************",
                                "Sunday 01 March 2026  22:07:12 +0000 (0:00:00.037)       0:02:58.677 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:07:12 +0000 (0:00:00.449)       0:02:59.127 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:13 +0000 (0:00:00.451)       0:02:59.578 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/crictl-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:13 +0000 (0:00:00.120)       0:02:59.699 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:13 +0000 (0:00:00.430)       0:03:00.129 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:15 +0000 (0:00:01.245)       0:03:01.374 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cri-tools/releases/download/v1.34.0/critest-v1.34.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:15 +0000 (0:00:00.127)       0:03:01.501 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:15 +0000 (0:00:00.414)       0:03:01.915 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cri_tools : Create crictl config] ********************",
                                "Sunday 01 March 2026  22:07:16 +0000 (0:00:01.192)       0:03:03.108 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/opt/cni/bin)] *********",
                                "Sunday 01 March 2026  22:07:17 +0000 (0:00:00.538)       0:03:03.646 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:07:17 +0000 (0:00:00.289)       0:03:03.936 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:17 +0000 (0:00:00.291)       0:03:04.227 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containernetworking/plugins/releases/download/v1.8.0/cni-plugins-linux-amd64-v1.8.0.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:18 +0000 (0:00:00.115)       0:03:04.343 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:18 +0000 (0:00:00.423)       0:03:04.767 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Gather variables for each operating system] ***",
                                "Sunday 01 March 2026  22:07:20 +0000 (0:00:02.122)       0:03:06.890 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/containers/roles/cni_plugins/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Install additional packages] ***********",
                                "Sunday 01 March 2026  22:07:20 +0000 (0:00:00.129)       0:03:07.019 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Ensure IPv6 is enabled] ****************",
                                "Sunday 01 March 2026  22:07:21 +0000 (0:00:01.208)       0:03:08.228 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules on-boot] *********",
                                "Sunday 01 March 2026  22:07:22 +0000 (0:00:00.261)       0:03:08.489 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.cni_plugins : Enable kernel modules in runtime] ******",
                                "Sunday 01 March 2026  22:07:22 +0000 (0:00:00.497)       0:03:08.987 **********",
                                "ok: [instance] => (item=br_netfilter)",
                                "ok: [instance] => (item=ip_tables)",
                                "ok: [instance] => (item=ip6_tables)",
                                "ok: [instance] => (item=nf_conntrack)",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:07:23 +0000 (0:00:00.829)       0:03:09.817 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:23 +0000 (0:00:00.301)       0:03:10.118 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://dl.k8s.io/release/v1.28.13/bin/linux/amd64/kubelet\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:23 +0000 (0:00:00.110)       0:03:10.228 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:24 +0000 (0:00:00.460)       0:03:10.688 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Gather variables for each operating system] ***",
                                "Sunday 01 March 2026  22:07:24 +0000 (0:00:00.281)       0:03:10.970 **********",
                                "ok: [instance] => (item=/home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubelet/vars/debian.yml)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install coreutils] *************************",
                                "Sunday 01 March 2026  22:07:24 +0000 (0:00:00.140)       0:03:11.111 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Install additional packages] ***************",
                                "Sunday 01 March 2026  22:07:24 +0000 (0:00:00.094)       0:03:11.205 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure sysctl values] *******************",
                                "Sunday 01 March 2026  22:07:26 +0000 (0:00:01.233)       0:03:12.439 **********",
                                "ok: [instance] => (item={'name': 'net.ipv4.ip_forward', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-iptables', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.bridge.bridge-nf-call-ip6tables', 'value': 1})",
                                "ok: [instance] => (item={'name': 'net.ipv4.conf.all.rp_filter', 'value': 0})",
                                "ok: [instance] => (item={'name': 'fs.inotify.max_queued_events', 'value': 1048576})",
                                "ok: [instance] => (item={'name': 'fs.inotify.max_user_instances', 'value': 8192})",
                                "ok: [instance] => (item={'name': 'fs.inotify.max_user_watches', 'value': 1048576})",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create folders for kubernetes configuration] ***",
                                "Sunday 01 March 2026  22:07:27 +0000 (0:00:01.359)       0:03:13.798 **********",
                                "ok: [instance] => (item=/etc/systemd/system/kubelet.service.d)",
                                "ok: [instance] => (item=/etc/kubernetes)",
                                "ok: [instance] => (item=/etc/kubernetes/manifests)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubelet systemd service config] ********",
                                "Sunday 01 March 2026  22:07:28 +0000 (0:00:00.629)       0:03:14.427 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Add kubeadm dropin for kubelet systemd service config] ***",
                                "Sunday 01 March 2026  22:07:28 +0000 (0:00:00.478)       0:03:14.906 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Check swap status] *************************",
                                "Sunday 01 March 2026  22:07:29 +0000 (0:00:00.491)       0:03:15.397 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Disable swap] ******************************",
                                "Sunday 01 March 2026  22:07:29 +0000 (0:00:00.269)       0:03:15.667 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Remove swapfile from /etc/fstab] ***********",
                                "Sunday 01 March 2026  22:07:29 +0000 (0:00:00.075)       0:03:15.742 **********",
                                "ok: [instance] => (item=swap)",
                                "ok: [instance] => (item=none)",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Create noswap systemd service config file] ***",
                                "Sunday 01 March 2026  22:07:29 +0000 (0:00:00.543)       0:03:16.286 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable noswap service] *********************",
                                "Sunday 01 March 2026  22:07:30 +0000 (0:00:00.493)       0:03:16.779 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Force any restarts if necessary] ***********",
                                "Sunday 01 March 2026  22:07:30 +0000 (0:00:00.445)       0:03:17.225 **********",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Enable and start kubelet service] **********",
                                "Sunday 01 March 2026  22:07:30 +0000 (0:00:00.034)       0:03:17.259 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure availability of dbus on Debian] *****",
                                "Sunday 01 March 2026  22:07:31 +0000 (0:00:00.405)       0:03:17.665 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Configure short hostname] ******************",
                                "Sunday 01 March 2026  22:07:32 +0000 (0:00:01.165)       0:03:18.830 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubelet : Ensure hostname inside hosts file] *********",
                                "Sunday 01 March 2026  22:07:35 +0000 (0:00:02.561)       0:03:21.392 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kubernetes repository] ***********",
                                "Sunday 01 March 2026  22:07:35 +0000 (0:00:00.267)       0:03:21.659 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup control plane] ********************",
                                "Sunday 01 March 2026  22:07:35 +0000 (0:00:00.607)       0:03:22.267 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/control-plane.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Bootstrap cluster] **********************",
                                "Sunday 01 March 2026  22:07:36 +0000 (0:00:00.144)       0:03:22.412 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/kubernetes/tasks/bootstrap-cluster.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if any control plane is bootstrapped] ***",
                                "Sunday 01 March 2026  22:07:36 +0000 (0:00:00.142)       0:03:22.555 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Pick node from pre-existing cluster] ****",
                                "Sunday 01 March 2026  22:07:36 +0000 (0:00:00.296)       0:03:22.852 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Select first node to initialize cluster] ***",
                                "Sunday 01 March 2026  22:07:36 +0000 (0:00:00.079)       0:03:22.931 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Print selected bootstrap node] **********",
                                "Sunday 01 March 2026  22:07:36 +0000 (0:00:00.075)       0:03:23.007 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"instance\"",
                                "}",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upload cluster configuration for bootstrap node] ***",
                                "Sunday 01 March 2026  22:07:36 +0000 (0:00:00.071)       0:03:23.079 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create pki folder] **********************",
                                "Sunday 01 March 2026  22:07:37 +0000 (0:00:00.710)       0:03:23.789 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca key] ***************",
                                "Sunday 01 March 2026  22:07:37 +0000 (0:00:00.278)       0:03:24.068 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create kubernetes ca cert] **************",
                                "Sunday 01 March 2026  22:07:37 +0000 (0:00:00.073)       0:03:24.141 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca key] *********************",
                                "Sunday 01 March 2026  22:07:37 +0000 (0:00:00.070)       0:03:24.212 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create etcd-ca cert] ********************",
                                "Sunday 01 March 2026  22:07:37 +0000 (0:00:00.071)       0:03:24.284 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca key] **************",
                                "Sunday 01 March 2026  22:07:38 +0000 (0:00:00.074)       0:03:24.359 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create front-proxy-ca cert] *************",
                                "Sunday 01 March 2026  22:07:38 +0000 (0:00:00.066)       0:03:24.425 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Initialize cluster] *********************",
                                "Sunday 01 March 2026  22:07:38 +0000 (0:00:00.057)       0:03:24.483 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the node is already part of the cluster] ***",
                                "Sunday 01 March 2026  22:07:38 +0000 (0:00:00.227)       0:03:24.710 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Join cluster] ***************************",
                                "Sunday 01 March 2026  22:07:38 +0000 (0:00:00.246)       0:03:24.957 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Create folder for admin configuration] ***",
                                "Sunday 01 March 2026  22:07:38 +0000 (0:00:00.068)       0:03:25.025 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Creating a symlink for admin configuration file] ***",
                                "Sunday 01 March 2026  22:07:38 +0000 (0:00:00.242)       0:03:25.267 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Add bash autocomplete for kubectl] ******",
                                "Sunday 01 March 2026  22:07:39 +0000 (0:00:00.238)       0:03:25.506 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install PIP] ****************************",
                                "Sunday 01 March 2026  22:07:39 +0000 (0:00:00.236)       0:03:25.743 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using pip on supported systems] ***",
                                "Sunday 01 March 2026  22:07:40 +0000 (0:00:01.138)       0:03:26.882 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Enable EPEL repository] *****************",
                                "Sunday 01 March 2026  22:07:41 +0000 (0:00:01.214)       0:03:28.097 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Install Kubernetes Python package using package manager on supported systems] ***",
                                "Sunday 01 March 2026  22:07:41 +0000 (0:00:00.090)       0:03:28.187 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Allow workload on control plane node] ***",
                                "Sunday 01 March 2026  22:07:41 +0000 (0:00:00.082)       0:03:28.269 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Remove kube-proxy resources] ************",
                                "Sunday 01 March 2026  22:07:42 +0000 (0:00:00.804)       0:03:29.073 **********",
                                "skipping: [instance] => (item=DaemonSet)",
                                "skipping: [instance] => (item=ConfigMap)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Start an upgrade] ***********************",
                                "Sunday 01 March 2026  22:07:42 +0000 (0:00:00.074)       0:03:29.148 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Check if the Kubernetes API services is up to date] ***",
                                "Sunday 01 March 2026  22:07:42 +0000 (0:00:00.082)       0:03:29.230 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Trigger an upgrade of the Kubernetes API services] ***",
                                "Sunday 01 March 2026  22:07:42 +0000 (0:00:00.083)       0:03:29.314 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Setup nodes] ****************************",
                                "Sunday 01 March 2026  22:07:43 +0000 (0:00:00.076)       0:03:29.390 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Upgrade & restart Kubelet node for upgrade (if needed)] ***",
                                "Sunday 01 March 2026  22:07:43 +0000 (0:00:00.064)       0:03:29.454 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.kubernetes : Set node selector for CoreDNS components] ***",
                                "Sunday 01 March 2026  22:07:43 +0000 (0:00:00.541)       0:03:29.995 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Install control-plane components] ****************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:07:44 +0000 (0:00:01.125)       0:03:31.121 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.forget_package : Forget package] *********************",
                                "Sunday 01 March 2026  22:07:49 +0000 (0:00:05.160)       0:03:36.282 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:07:50 +0000 (0:00:00.286)       0:03:36.568 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:07:51 +0000 (0:00:01.222)       0:03:37.791 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:51 +0000 (0:00:00.277)       0:03:38.068 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:51 +0000 (0:00:00.109)       0:03:38.177 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:52 +0000 (0:00:00.389)       0:03:38.567 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm repository] ***********************",
                                "Sunday 01 March 2026  22:07:53 +0000 (0:00:01.321)       0:03:39.889 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Add bash autocomplete for helm] ***************",
                                "Sunday 01 March 2026  22:07:53 +0000 (0:00:00.388)       0:03:40.277 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Get Helm plugins dir] *************************",
                                "Sunday 01 March 2026  22:07:54 +0000 (0:00:00.249)       0:03:40.527 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Create Helm plugins directory if it does not exist] ***",
                                "Sunday 01 March 2026  22:07:54 +0000 (0:00:00.313)       0:03:40.840 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.helm : Remove Helm diff plugin installed by kubernetes.core.helm_plugin] ***",
                                "Sunday 01 March 2026  22:07:54 +0000 (0:00:00.250)       0:03:41.091 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Install plugin] **********************************************************",
                                "Sunday 01 March 2026  22:07:54 +0000 (0:00:00.235)       0:03:41.327 **********",
                                "included: vexxhost.containers.download_artifact for instance",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:07:55 +0000 (0:00:00.116)       0:03:41.443 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/databus23/helm-diff/releases/download/v3.8.1/helm-diff-linux-amd64.tgz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:07:55 +0000 (0:00:00.215)       0:03:41.658 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:07:55 +0000 (0:00:00.385)       0:03:42.044 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:07:57 +0000 (0:00:01.459)       0:03:43.504 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:07:57 +0000 (0:00:00.104)       0:03:43.609 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Get current Kubernetes version] *************",
                                "Sunday 01 March 2026  22:07:57 +0000 (0:00:00.687)       0:03:44.296 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cilium : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:07:58 +0000 (0:00:00.999)       0:03:45.295 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:08:01 +0000 (0:00:02.091)       0:03:47.387 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kubernetes_node_labels : Add labels to node] *********",
                                "Sunday 01 March 2026  22:08:05 +0000 (0:00:04.219)       0:03:51.606 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [all] *********************************************************************",
                                "",
                                "TASK [Uninstall unattended-upgrades] *******************************************",
                                "Sunday 01 March 2026  22:08:06 +0000 (0:00:00.818)       0:03:52.425 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [controllers] *************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:08:07 +0000 (0:00:00.940)       0:03:53.366 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:12 +0000 (0:00:05.120)       0:03:58.486 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:08:12 +0000 (0:00:00.111)       0:03:58.598 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.local_path_provisioner : Deploy Helm chart] **********",
                                "Sunday 01 March 2026  22:08:12 +0000 (0:00:00.576)       0:03:59.174 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:14 +0000 (0:00:01.367)       0:04:00.542 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Collect \"ceph mon dump\" output from a monitor] ***",
                                "Sunday 01 March 2026  22:08:14 +0000 (0:00:00.097)       0:04:00.639 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Generate fact with list of Ceph monitors] ***",
                                "Sunday 01 March 2026  22:08:14 +0000 (0:00:00.113)       0:04:00.753 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create Ceph pool] *********************",
                                "Sunday 01 March 2026  22:08:14 +0000 (0:00:00.070)       0:04:00.823 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Create user client.kube] **************",
                                "Sunday 01 March 2026  22:08:14 +0000 (0:00:00.115)       0:04:00.939 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Retrieve keyring for client.kube] *****",
                                "Sunday 01 March 2026  22:08:14 +0000 (0:00:00.141)       0:04:01.081 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Store keyring inside fact] ************",
                                "Sunday 01 March 2026  22:08:14 +0000 (0:00:00.126)       0:04:01.208 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_csi_rbd : Deploy Helm chart] ********************",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.227)       0:04:01.435 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Clone PowerStore CSI from GitHub] ***",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.141)       0:04:01.577 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create Secret] **********************",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.065)       0:04:01.642 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Create StorageClass] ****************",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.073)       0:04:01.715 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.powerstore_csi : Deploy Helm chart] ******************",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.074)       0:04:01.790 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.149)       0:04:01.939 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.086)       0:04:02.025 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.067)       0:04:02.092 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Detect if InitiatorName is set] ***********",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.100)       0:04:02.193 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Generate a new InitiatorName] *************",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.061)       0:04:02.254 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Write the new InitiatorName] **************",
                                "Sunday 01 March 2026  22:08:15 +0000 (0:00:00.082)       0:04:02.337 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create namespace] *************************",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.064)       0:04:02.402 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Install Portworx] *************************",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.059)       0:04:02.462 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Wait till the CRDs are created] ***********",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.069)       0:04:02.531 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.portworx : Create Portworx Storage Cluster] **********",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.063)       0:04:02.594 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI RBAC] **********************",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.151)       0:04:02.746 **********",
                                "skipping: [instance] => (item={'name': 'controllerplugin'})",
                                "skipping: [instance] => (item={'name': 'nodeplugin'})",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Deploy CSI] ***************************",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.089)       0:04:02.835 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.storpool_csi : Create StorageClass] ******************",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.065)       0:04:02.900 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Deploy CSI] *******************",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.147)       0:04:03.048 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create Secret] ****************",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.212)       0:04:03.261 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ibm_block_csi_driver : Create StorageClass] **********",
                                "Sunday 01 March 2026  22:08:16 +0000 (0:00:00.069)       0:04:03.331 **********",
                                "skipping: [instance]",
                                "",
                                "PLAY [Deploy Infrastructure] ***************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:08:17 +0000 (0:00:00.159)       0:04:03.491 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Deploy Helm chart] *******************************************************",
                                "Sunday 01 March 2026  22:08:20 +0000 (0:00:03.807)       0:04:07.298 **********",
                                "included: vexxhost.kubernetes.cert_manager for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:21 +0000 (0:00:00.116)       0:04:07.414 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:08:21 +0000 (0:00:00.107)       0:04:07.522 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cert_manager : Deploy Helm chart] ********************",
                                "Sunday 01 March 2026  22:08:21 +0000 (0:00:00.593)       0:04:08.116 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Create self-signed cluster issuer] ***",
                                "Sunday 01 March 2026  22:08:24 +0000 (0:00:02.271)       0:04:10.387 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Import tasks for ClusterIssuer type] ***",
                                "Sunday 01 March 2026  22:08:25 +0000 (0:00:01.035)       0:04:11.422 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/atmosphere/roles/cluster_issuer/tasks/type/self-signed/main.yml for instance",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Create ClusterIssuer] ***************",
                                "Sunday 01 March 2026  22:08:25 +0000 (0:00:00.107)       0:04:11.529 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Wait till the secret is created] ****",
                                "Sunday 01 March 2026  22:08:25 +0000 (0:00:00.760)       0:04:12.290 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Copy CA certificate on host] ********",
                                "Sunday 01 March 2026  22:08:26 +0000 (0:00:00.687)       0:04:12.978 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cluster_issuer : Flush all handlers] *****************",
                                "Sunday 01 March 2026  22:08:27 +0000 (0:00:00.508)       0:04:13.486 **********",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:27 +0000 (0:00:00.107)       0:04:13.594 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:08:27 +0000 (0:00:00.124)       0:04:13.718 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ingress_nginx : Deploy Helm chart] *******************",
                                "Sunday 01 March 2026  22:08:27 +0000 (0:00:00.585)       0:04:14.303 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:29 +0000 (0:00:01.721)       0:04:16.025 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:08:29 +0000 (0:00:00.115)       0:04:16.140 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Install all CRDs] ********",
                                "Sunday 01 March 2026  22:08:30 +0000 (0:00:00.611)       0:04:16.752 **********",
                                "ok: [instance] => (item=messaging-topology-operator)",
                                "ok: [instance] => (item=rabbitmq-cluster)",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq_cluster_operator : Deploy Helm chart] *******",
                                "Sunday 01 March 2026  22:08:33 +0000 (0:00:03.361)       0:04:20.113 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:36 +0000 (0:00:02.452)       0:04:22.566 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:08:36 +0000 (0:00:00.117)       0:04:22.683 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Install all CRDs] ***",
                                "Sunday 01 March 2026  22:08:36 +0000 (0:00:00.573)       0:04:23.256 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster_operator : Deploy Helm chart] ***",
                                "Sunday 01 March 2026  22:08:40 +0000 (0:00:03.430)       0:04:26.687 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Check if the Percona XtraDB cluster secret exists] ***",
                                "Sunday 01 March 2026  22:08:42 +0000 (0:00:01.688)       0:04:28.375 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Create a secret] ************",
                                "Sunday 01 March 2026  22:08:42 +0000 (0:00:00.736)       0:04:29.111 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Check if the Percona XtraDB cluster exists] ***",
                                "Sunday 01 March 2026  22:08:42 +0000 (0:00:00.072)       0:04:29.184 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Get current status of the cluster] ***",
                                "Sunday 01 March 2026  22:08:43 +0000 (0:00:00.780)       0:04:29.964 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Assert that the cluster is healthy before upgrade] ***",
                                "Sunday 01 March 2026  22:08:43 +0000 (0:00:00.087)       0:04:30.052 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Stop PXC-operator] **********",
                                "Sunday 01 March 2026  22:08:43 +0000 (0:00:00.082)       0:04:30.135 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Change the cluster Statefulset image to 8.0] ***",
                                "Sunday 01 March 2026  22:08:43 +0000 (0:00:00.079)       0:04:30.214 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Wait until the cluster Statefulset rollout] ***",
                                "Sunday 01 March 2026  22:08:43 +0000 (0:00:00.081)       0:04:30.296 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Update pxc cluster spec] ****",
                                "Sunday 01 March 2026  22:08:44 +0000 (0:00:00.078)       0:04:30.374 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Start PXC-operator] *********",
                                "Sunday 01 March 2026  22:08:44 +0000 (0:00:00.082)       0:04:30.457 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Apply Percona XtraDB cluster] ***",
                                "Sunday 01 March 2026  22:08:44 +0000 (0:00:00.082)       0:04:30.539 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.percona_xtradb_cluster : Create percona haproxy metric service] ***",
                                "Sunday 01 March 2026  22:08:45 +0000 (0:00:00.894)       0:04:31.433 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:45 +0000 (0:00:00.838)       0:04:32.271 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:08:46 +0000 (0:00:00.108)       0:04:32.380 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.valkey : Create TLS resources] ***********************",
                                "Sunday 01 March 2026  22:08:46 +0000 (0:00:00.579)       0:04:32.960 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.valkey : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:08:47 +0000 (0:00:00.805)       0:04:33.766 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:08:49 +0000 (0:00:01.961)       0:04:35.727 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:08:49 +0000 (0:00:00.076)       0:04:35.804 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:08:49 +0000 (0:00:00.075)       0:04:35.879 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:08:49 +0000 (0:00:00.223)       0:04:36.102 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:08:49 +0000 (0:00:00.067)       0:04:36.170 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:08:50 +0000 (0:00:00.708)       0:04:36.878 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:08:50 +0000 (0:00:00.084)       0:04:36.963 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:08:50 +0000 (0:00:00.074)       0:04:37.037 **********",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:08:50 +0000 (0:00:00.087)       0:04:37.125 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:08:50 +0000 (0:00:00.113)       0:04:37.239 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:08:51 +0000 (0:00:00.105)       0:04:37.344 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Get the Kuberentes service for Percona XtraDB Cluster] ***",
                                "Sunday 01 March 2026  22:08:51 +0000 (0:00:00.594)       0:04:37.939 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Install MySQL python package] *************",
                                "Sunday 01 March 2026  22:08:52 +0000 (0:00:00.752)       0:04:38.691 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Check MySQL ready] ************************",
                                "Sunday 01 March 2026  22:08:53 +0000 (0:00:01.002)       0:04:39.694 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Create Keycloak database] *****************",
                                "Sunday 01 March 2026  22:08:53 +0000 (0:00:00.513)       0:04:40.207 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Create a Keycloak user] *******************",
                                "Sunday 01 March 2026  22:08:54 +0000 (0:00:00.539)       0:04:40.747 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Disable pxc strict mode] ******************",
                                "Sunday 01 March 2026  22:08:54 +0000 (0:00:00.523)       0:04:41.270 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  22:08:55 +0000 (0:00:00.488)       0:04:41.759 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Wait until keycloak ready] ****************",
                                "Sunday 01 March 2026  22:08:57 +0000 (0:00:01.642)       0:04:43.402 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Keycloak Ingress] *************************************************",
                                "Sunday 01 March 2026  22:08:57 +0000 (0:00:00.726)       0:04:44.128 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress keycloak] *******************",
                                "Sunday 01 March 2026  22:08:57 +0000 (0:00:00.112)       0:04:44.241 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keycloak : Enable pxc strict mode] *******************",
                                "Sunday 01 March 2026  22:08:59 +0000 (0:00:01.189)       0:04:45.430 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keepalived : Deploy service] *************************",
                                "Sunday 01 March 2026  22:08:59 +0000 (0:00:00.359)       0:04:45.790 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy Monitoring] *******************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:09:00 +0000 (0:00:00.888)       0:04:46.678 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:09:04 +0000 (0:00:04.314)       0:04:50.992 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:09:04 +0000 (0:00:00.113)       0:04:51.106 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.node_feature_discovery : Install all CRDs] ***********",
                                "Sunday 01 March 2026  22:09:05 +0000 (0:00:00.594)       0:04:51.700 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.node_feature_discovery : Deploy Helm chart] **********",
                                "Sunday 01 March 2026  22:09:06 +0000 (0:00:00.868)       0:04:52.569 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:09:07 +0000 (0:00:01.605)       0:04:54.175 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:09:07 +0000 (0:00:00.106)       0:04:54.282 **********",
                                "ok: [instance]",
                                "",
                                "TASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****",
                                "Sunday 01 March 2026  22:09:08 +0000 (0:00:00.643)       0:04:54.925 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Wait until Keycloak service is ready] ***",
                                "Sunday 01 March 2026  22:09:09 +0000 (0:00:01.346)       0:04:56.272 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak realm] *******",
                                "Sunday 01 March 2026  22:09:10 +0000 (0:00:00.725)       0:04:56.997 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Add client roles in \"id_token\"] ***",
                                "Sunday 01 March 2026  22:09:12 +0000 (0:00:01.457)       0:04:58.455 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" CA certificate] ***",
                                "Sunday 01 March 2026  22:09:13 +0000 (0:00:01.284)       0:04:59.740 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client certificate] ***",
                                "Sunday 01 March 2026  22:09:13 +0000 (0:00:00.476)       0:05:00.216 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Retrieve \"etcd\" client key] ***",
                                "Sunday 01 March 2026  22:09:14 +0000 (0:00:00.242)       0:05:00.458 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create secrets for monitoring] ***",
                                "Sunday 01 March 2026  22:09:14 +0000 (0:00:00.230)       0:05:00.689 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Generate client secret passwords] ***",
                                "Sunday 01 March 2026  22:09:15 +0000 (0:00:00.724)       0:05:01.414 **********",
                                "ok: [instance] => (item=alertmanager)",
                                "ok: [instance] => (item=grafana)",
                                "ok: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Collect all client secrets] ***",
                                "Sunday 01 March 2026  22:09:17 +0000 (0:00:02.144)       0:05:03.558 **********",
                                "ok: [instance] => (item=alertmanager)",
                                "ok: [instance] => (item=grafana)",
                                "ok: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak clients] *****",
                                "Sunday 01 March 2026  22:09:19 +0000 (0:00:01.988)       0:05:05.547 **********",
                                "ok: [instance] => (item=None)",
                                "ok: [instance] => (item=None)",
                                "ok: [instance] => (item=None)",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak roles] *******",
                                "Sunday 01 March 2026  22:09:22 +0000 (0:00:03.413)       0:05:08.960 **********",
                                "ok: [instance] => (item=None)",
                                "ok: [instance] => (item=None)",
                                "ok: [instance] => (item=None)",
                                "ok: [instance] => (item=None)",
                                "ok: [instance] => (item=None)",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Generate cookie secrets] *****",
                                "Sunday 01 March 2026  22:09:27 +0000 (0:00:04.625)       0:05:13.586 **********",
                                "ok: [instance] => (item=alertmanager)",
                                "ok: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Generate OAuth2 proxy configuration] ***",
                                "Sunday 01 March 2026  22:09:28 +0000 (0:00:01.443)       0:05:15.029 **********",
                                "ok: [instance] => (item=alertmanager)",
                                "ok: [instance] => (item=prometheus)",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Create certificate issuer] ***",
                                "Sunday 01 March 2026  22:09:30 +0000 (0:00:01.487)       0:05:16.517 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Install all CRDs] ************",
                                "Sunday 01 March 2026  22:09:30 +0000 (0:00:00.725)       0:05:17.242 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy additional dashboards] ***",
                                "Sunday 01 March 2026  22:09:39 +0000 (0:00:08.398)       0:05:25.641 **********",
                                "ok: [instance] => (item={'name': 'haproxy', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'goldpinger', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'node-exporter-full', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'ceph-cluster', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'ceph-cluster-advanced', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'hosts-overview', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'host-details', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'pool-overview', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'pool-detail', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'osds-overview', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'osd-device-details', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'rbd-overview', 'state': 'present'})",
                                "ok: [instance] => (item={'name': 'rbd-details', 'state': 'present'})",
                                "",
                                "TASK [vexxhost.atmosphere.kube_prometheus_stack : Deploy Helm chart] ***********",
                                "Sunday 01 March 2026  22:09:54 +0000 (0:00:15.570)       0:05:41.211 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:10:10 +0000 (0:00:15.437)       0:05:56.649 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:10:10 +0000 (0:00:00.125)       0:05:56.774 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.loki : Deploy Helm chart] ****************************",
                                "Sunday 01 March 2026  22:10:11 +0000 (0:00:00.653)       0:05:57.427 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:10:14 +0000 (0:00:03.197)       0:06:00.624 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:10:14 +0000 (0:00:00.121)       0:06:00.746 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.vector : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:10:15 +0000 (0:00:00.624)       0:06:01.371 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:10:16 +0000 (0:00:01.624)       0:06:02.995 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:10:16 +0000 (0:00:00.121)       0:06:03.117 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.goldpinger : Deploy Helm chart] **********************",
                                "Sunday 01 March 2026  22:10:17 +0000 (0:00:00.602)       0:06:03.719 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ipmi_exporter : Deploy service] **********************",
                                "Sunday 01 March 2026  22:10:18 +0000 (0:00:01.592)       0:06:05.312 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:10:19 +0000 (0:00:00.902)       0:06:06.214 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:10:19 +0000 (0:00:00.124)       0:06:06.339 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.prometheus_pushgateway : Deploy Helm chart] **********",
                                "Sunday 01 March 2026  22:10:20 +0000 (0:00:00.593)       0:06:06.933 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy OpenStack] ********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:10:22 +0000 (0:00:01.774)       0:06:08.707 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:10:26 +0000 (0:00:04.305)       0:06:13.012 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:10:26 +0000 (0:00:00.119)       0:06:13.132 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:10:26 +0000 (0:00:00.082)       0:06:13.214 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:10:26 +0000 (0:00:00.088)       0:06:13.303 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:10:27 +0000 (0:00:00.081)       0:06:13.384 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:10:27 +0000 (0:00:00.078)       0:06:13.463 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:10:27 +0000 (0:00:00.079)       0:06:13.543 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:10:27 +0000 (0:00:00.086)       0:06:13.630 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:10:27 +0000 (0:00:00.096)       0:06:13.726 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:10:27 +0000 (0:00:00.130)       0:06:13.857 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:10:27 +0000 (0:00:00.121)       0:06:13.978 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.memcached : Deploy Helm chart] ***********************",
                                "Sunday 01 March 2026  22:10:28 +0000 (0:00:00.631)       0:06:14.609 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.memcached : Apply manifests for monitoring] **********",
                                "Sunday 01 March 2026  22:10:30 +0000 (0:00:01.880)       0:06:16.490 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************",
                                "Sunday 01 March 2026  22:10:30 +0000 (0:00:00.790)       0:06:17.281 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****",
                                "Sunday 01 March 2026  22:10:31 +0000 (0:00:01.039)       0:06:18.320 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********",
                                "Sunday 01 March 2026  22:10:32 +0000 (0:00:00.294)       0:06:18.615 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:10:32 +0000 (0:00:00.619)       0:06:19.234 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:10:33 +0000 (0:00:00.123)       0:06:19.358 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:10:33 +0000 (0:00:00.130)       0:06:19.489 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:10:33 +0000 (0:00:00.775)       0:06:20.265 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:10:34 +0000 (0:00:00.186)       0:06:20.451 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:10:34 +0000 (0:00:00.195)       0:06:20.647 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:10:34 +0000 (0:00:00.188)       0:06:20.835 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:10:35 +0000 (0:00:00.936)       0:06:21.772 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.718)       0:06:22.490 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.095)       0:06:22.586 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.079)       0:06:22.665 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.088)       0:06:22.754 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.077)       0:06:22.831 **********",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.121)       0:06:22.952 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.108)       0:06:23.061 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:10:36 +0000 (0:00:00.110)       0:06:23.172 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keycloak realms] *******************",
                                "Sunday 01 March 2026  22:10:37 +0000 (0:00:00.629)       0:06:23.802 **********",
                                "ok: [instance] => (item=None)",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Setup Keycloak Authentication Required Actions (MFA)] ***",
                                "Sunday 01 March 2026  22:10:38 +0000 (0:00:01.285)       0:06:25.087 **********",
                                "ok: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create ConfigMap with all OpenID connect configurations] ***",
                                "Sunday 01 March 2026  22:10:40 +0000 (0:00:01.303)       0:06:26.391 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keycloak clients] ******************",
                                "Sunday 01 March 2026  22:10:41 +0000 (0:00:00.995)       0:06:27.386 **********",
                                "ok: [instance] => (item=None)",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Assign realm-management roles to service account] ***",
                                "Sunday 01 March 2026  22:10:42 +0000 (0:00:00.993)       0:06:28.380 **********",
                                "ok: [instance] => (item=None)",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  22:10:43 +0000 (0:00:01.260)       0:06:29.641 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:10:46 +0000 (0:00:03.240)       0:06:32.881 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:10:46 +0000 (0:00:00.188)       0:06:33.070 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:10:46 +0000 (0:00:00.086)       0:06:33.156 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:10:46 +0000 (0:00:00.077)       0:06:33.234 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress identity] *************************************************",
                                "Sunday 01 March 2026  22:10:46 +0000 (0:00:00.098)       0:06:33.333 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress identity] *******************",
                                "Sunday 01 March 2026  22:10:47 +0000 (0:00:00.140)       0:06:33.473 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Validate if ingress is reachable] *********",
                                "Sunday 01 March 2026  22:10:48 +0000 (0:00:01.139)       0:06:34.613 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Wait until identity service ready] ********",
                                "Sunday 01 March 2026  22:10:48 +0000 (0:00:00.386)       0:06:34.999 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone domains] ******************",
                                "Sunday 01 March 2026  22:10:49 +0000 (0:00:00.810)       0:06:35.809 **********",
                                "ok: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone identity providers] *******",
                                "Sunday 01 March 2026  22:10:50 +0000 (0:00:01.236)       0:06:37.046 **********",
                                "ok: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone federation mappings] ******",
                                "Sunday 01 March 2026  22:10:51 +0000 (0:00:01.255)       0:06:38.301 **********",
                                "ok: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.atmosphere.keystone : Create Keystone federation protocols] *****",
                                "Sunday 01 March 2026  22:10:53 +0000 (0:00:01.171)       0:06:39.473 **********",
                                "ok: [instance] => (item=atmosphere)",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********",
                                "Sunday 01 March 2026  22:10:54 +0000 (0:00:01.330)       0:06:40.803 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:10:54 +0000 (0:00:00.331)       0:06:41.134 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:10:56 +0000 (0:00:01.736)       0:06:42.871 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:10:56 +0000 (0:00:00.282)       0:06:43.154 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:10:56 +0000 (0:00:00.126)       0:06:43.281 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:10:57 +0000 (0:00:00.407)       0:06:43.689 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************",
                                "Sunday 01 March 2026  22:10:58 +0000 (0:00:01.110)       0:06:44.800 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***",
                                "Sunday 01 March 2026  22:10:59 +0000 (0:00:00.584)       0:06:45.384 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***",
                                "Sunday 01 March 2026  22:10:59 +0000 (0:00:00.907)       0:06:46.291 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***",
                                "Sunday 01 March 2026  22:11:00 +0000 (0:00:00.824)       0:06:47.115 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Generate OpenStack-Helm endpoints] ***************************************",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.423)       0:06:47.539 **********",
                                "included: openstack_helm_endpoints for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.195)       0:06:47.735 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.078)       0:06:47.813 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.071)       0:06:47.885 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.073)       0:06:47.958 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.072)       0:06:48.030 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.081)       0:06:48.112 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.075)       0:06:48.187 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:11:01 +0000 (0:00:00.089)       0:06:48.277 **********",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:11:02 +0000 (0:00:00.096)       0:06:48.374 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************",
                                "Sunday 01 March 2026  22:11:02 +0000 (0:00:00.117)       0:06:48.491 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********",
                                "Sunday 01 March 2026  22:11:02 +0000 (0:00:00.498)       0:06:48.989 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:11:03 +0000 (0:00:00.695)       0:06:49.685 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:11:03 +0000 (0:00:00.129)       0:06:49.815 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:11:03 +0000 (0:00:00.133)       0:06:49.949 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:11:04 +0000 (0:00:00.762)       0:06:50.711 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:11:04 +0000 (0:00:00.188)       0:06:50.900 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:11:04 +0000 (0:00:00.180)       0:06:51.080 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:11:04 +0000 (0:00:00.201)       0:06:51.281 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:11:05 +0000 (0:00:00.897)       0:06:52.179 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:11:06 +0000 (0:00:00.790)       0:06:52.970 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:11:06 +0000 (0:00:00.081)       0:06:53.052 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:11:06 +0000 (0:00:00.085)       0:06:53.138 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:11:06 +0000 (0:00:00.080)       0:06:53.218 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:11:06 +0000 (0:00:00.083)       0:06:53.301 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=key_manager)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:11:07 +0000 (0:00:00.154)       0:06:53.455 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:11:07 +0000 (0:00:00.118)       0:06:53.574 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:11:07 +0000 (0:00:00.109)       0:06:53.683 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.barbican : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  22:11:07 +0000 (0:00:00.641)       0:06:54.325 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:11:10 +0000 (0:00:02.076)       0:06:56.402 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:11:10 +0000 (0:00:00.167)       0:06:56.570 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:11:10 +0000 (0:00:00.073)       0:06:56.643 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:11:10 +0000 (0:00:00.077)       0:06:56.720 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress key-manager] **********************************************",
                                "Sunday 01 March 2026  22:11:10 +0000 (0:00:00.090)       0:06:56.811 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress key-manager] ****************",
                                "Sunday 01 March 2026  22:11:10 +0000 (0:00:00.123)       0:06:56.934 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.barbican : Create creator role] **********************",
                                "Sunday 01 March 2026  22:11:12 +0000 (0:00:01.489)       0:06:58.424 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.barbican : Add implied roles] ************************",
                                "Sunday 01 March 2026  22:11:13 +0000 (0:00:01.028)       0:06:59.452 **********",
                                "ok: [instance] => (item={'role': 'member', 'implies': 'creator'})",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:11:16 +0000 (0:00:03.826)       0:07:03.279 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:11:17 +0000 (0:00:00.120)       0:07:03.399 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph : Deploy Helm chart] ***********************",
                                "Sunday 01 March 2026  22:11:17 +0000 (0:00:00.640)       0:07:04.040 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:02.711)       0:07:06.751 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:00.082)       0:07:06.834 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:00.067)       0:07:06.901 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:00.080)       0:07:06.981 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:00.077)       0:07:07.059 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:00.070)       0:07:07.130 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:00.088)       0:07:07.218 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:11:20 +0000 (0:00:00.089)       0:07:07.308 **********",
                                "ok: [instance] => (item=rook_ceph_cluster)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:11:21 +0000 (0:00:00.102)       0:07:07.410 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:11:21 +0000 (0:00:00.111)       0:07:07.522 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:11:21 +0000 (0:00:00.109)       0:07:07.631 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Set mgr/cephadm/warn_on_stray_daemons to false] ***",
                                "Sunday 01 March 2026  22:11:21 +0000 (0:00:00.608)       0:07:08.239 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Collect \"ceph quorum_status\" output from a monitor] ***",
                                "Sunday 01 March 2026  22:11:36 +0000 (0:00:14.809)       0:07:23.049 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for client.admin] ***",
                                "Sunday 01 March 2026  22:11:51 +0000 (0:00:15.061)       0:07:38.111 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Retrieve keyring for monitors] ***",
                                "Sunday 01 March 2026  22:11:53 +0000 (0:00:01.643)       0:07:39.755 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create Ceph cluster resource] ****",
                                "Sunday 01 March 2026  22:11:55 +0000 (0:00:01.631)       0:07:41.386 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Deploy Helm chart] ***************",
                                "Sunday 01 March 2026  22:11:55 +0000 (0:00:00.835)       0:07:42.221 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack user] ***********",
                                "Sunday 01 March 2026  22:11:57 +0000 (0:00:01.833)       0:07:44.055 **********",
                                "[WARNING]: Module did not set no_log for update_password",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Grant access to \"service\" project] ***",
                                "Sunday 01 March 2026  22:11:58 +0000 (0:00:01.153)       0:07:45.208 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack service] ********",
                                "Sunday 01 March 2026  22:12:02 +0000 (0:00:03.924)       0:07:49.133 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rook_ceph_cluster : Create OpenStack endpoints] ******",
                                "Sunday 01 March 2026  22:12:03 +0000 (0:00:01.069)       0:07:50.203 **********",
                                "ok: [instance] => (item={'interface': 'public', 'url': 'https://object-store.199-204-45-156.nip.io/swift/v1/%(tenant_id)s'})",
                                "ok: [instance] => (item={'interface': 'internal', 'url': 'http://rook-ceph-rgw-ceph.openstack.svc.cluster.local/swift/v1/%(tenant_id)s'})",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:12:05 +0000 (0:00:01.943)       0:07:52.146 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:12:06 +0000 (0:00:00.200)       0:07:52.346 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:12:06 +0000 (0:00:00.096)       0:07:52.443 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:12:06 +0000 (0:00:00.108)       0:07:52.552 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress rook-ceph-cluster] ****************************************",
                                "Sunday 01 March 2026  22:12:06 +0000 (0:00:00.101)       0:07:52.654 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress rook-ceph-cluster] **********",
                                "Sunday 01 March 2026  22:12:06 +0000 (0:00:00.166)       0:07:52.820 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:12:07 +0000 (0:00:01.237)       0:07:54.057 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:12:07 +0000 (0:00:00.125)       0:07:54.182 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Collect \"ceph mon dump\" output from a monitor] ***",
                                "Sunday 01 March 2026  22:12:08 +0000 (0:00:00.648)       0:07:54.831 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Generate fact with list of Ceph monitors] ***",
                                "Sunday 01 March 2026  22:12:23 +0000 (0:00:14.926)       0:08:09.758 **********",
                                "ok: [instance] => (item=10.96.240.200)",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph service] *************",
                                "Sunday 01 March 2026  22:12:23 +0000 (0:00:00.117)       0:08:09.875 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Create Ceph endpoints] ***********",
                                "Sunday 01 March 2026  22:12:24 +0000 (0:00:00.756)       0:08:10.631 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Retrieve client.admin keyring] ***",
                                "Sunday 01 March 2026  22:12:25 +0000 (0:00:00.800)       0:08:11.432 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Parse client.admin keyring] ******",
                                "Sunday 01 March 2026  22:12:26 +0000 (0:00:01.315)       0:08:12.748 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Create \"pvc-ceph-client-key\" secret] ***",
                                "Sunday 01 March 2026  22:12:26 +0000 (0:00:00.096)       0:08:12.845 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ceph_provisioners : Deploy Helm chart] ***************",
                                "Sunday 01 March 2026  22:12:27 +0000 (0:00:00.956)       0:08:13.801 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:12:29 +0000 (0:00:01.734)       0:08:15.536 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:12:29 +0000 (0:00:00.136)       0:08:15.672 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:12:29 +0000 (0:00:00.143)       0:08:15.816 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:12:30 +0000 (0:00:00.803)       0:08:16.619 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:12:30 +0000 (0:00:00.193)       0:08:16.813 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:12:30 +0000 (0:00:00.203)       0:08:17.016 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:12:30 +0000 (0:00:00.196)       0:08:17.213 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:12:31 +0000 (0:00:00.951)       0:08:18.164 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:12:32 +0000 (0:00:00.710)       0:08:18.874 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:12:32 +0000 (0:00:00.088)       0:08:18.963 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:12:32 +0000 (0:00:00.078)       0:08:19.041 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:32 +0000 (0:00:00.078)       0:08:19.120 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:32 +0000 (0:00:00.089)       0:08:19.209 **********",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=dashboard)",
                                "ok: [instance] => (item=image)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:12:33 +0000 (0:00:00.148)       0:08:19.358 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:12:33 +0000 (0:00:00.120)       0:08:19.478 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:12:33 +0000 (0:00:00.118)       0:08:19.597 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:12:33 +0000 (0:00:00.653)       0:08:20.250 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:12:36 +0000 (0:00:02.531)       0:08:22.782 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:12:36 +0000 (0:00:00.171)       0:08:22.953 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:12:36 +0000 (0:00:00.087)       0:08:23.041 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:12:36 +0000 (0:00:00.092)       0:08:23.133 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress image] ****************************************************",
                                "Sunday 01 March 2026  22:12:36 +0000 (0:00:00.098)       0:08:23.231 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress image] **********************",
                                "Sunday 01 March 2026  22:12:37 +0000 (0:00:00.307)       0:08:23.539 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create images] ***********************************************************",
                                "Sunday 01 March 2026  22:12:38 +0000 (0:00:01.192)       0:08:24.732 **********",
                                "included: glance_image for instance => (item={'container_format': 'bare', 'disk_format': 'raw', 'is_public': True, 'min_disk': 1, 'name': 'cirros', 'url': 'http://download.cirros-cloud.net/0.6.2/cirros-0.6.2-x86_64-disk.img'})",
                                "",
                                "TASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************",
                                "Sunday 01 March 2026  22:12:38 +0000 (0:00:00.201)       0:08:24.933 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  22:12:39 +0000 (0:00:01.277)       0:08:26.211 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  22:12:40 +0000 (0:00:01.082)       0:08:27.293 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.069)       0:08:27.363 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.090)       0:08:27.453 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.089)       0:08:27.543 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.087)       0:08:27.630 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.092)       0:08:27.722 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.085)       0:08:27.807 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.233)       0:08:28.041 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.111)       0:08:28.152 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.076)       0:08:28.229 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:12:41 +0000 (0:00:00.071)       0:08:28.300 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:12:42 +0000 (0:00:00.076)       0:08:28.377 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:12:42 +0000 (0:00:00.084)       0:08:28.461 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:42 +0000 (0:00:00.090)       0:08:28.552 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:42 +0000 (0:00:00.090)       0:08:28.643 **********",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=staffeln)",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:12:42 +0000 (0:00:00.127)       0:08:28.770 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:12:42 +0000 (0:00:00.272)       0:08:29.043 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:12:42 +0000 (0:00:00.118)       0:08:29.161 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.staffeln : Deploy Helm chart] ************************",
                                "Sunday 01 March 2026  22:12:43 +0000 (0:00:00.609)       0:08:29.770 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:12:45 +0000 (0:00:02.004)       0:08:31.775 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:12:45 +0000 (0:00:00.129)       0:08:31.905 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:12:45 +0000 (0:00:00.148)       0:08:32.053 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:12:46 +0000 (0:00:00.778)       0:08:32.831 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:12:46 +0000 (0:00:00.178)       0:08:33.009 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:12:46 +0000 (0:00:00.198)       0:08:33.208 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:12:47 +0000 (0:00:00.188)       0:08:33.396 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:12:47 +0000 (0:00:00.924)       0:08:34.321 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:12:48 +0000 (0:00:00.739)       0:08:35.061 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:12:48 +0000 (0:00:00.080)       0:08:35.141 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:12:48 +0000 (0:00:00.079)       0:08:35.221 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:48 +0000 (0:00:00.089)       0:08:35.311 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:49 +0000 (0:00:00.084)       0:08:35.395 **********",
                                "ok: [instance] => (item=volumev3)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=image)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:12:49 +0000 (0:00:00.165)       0:08:35.561 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:12:49 +0000 (0:00:00.126)       0:08:35.688 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:12:49 +0000 (0:00:00.120)       0:08:35.809 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cinder : Generate Helm values] ***********************",
                                "Sunday 01 March 2026  22:12:50 +0000 (0:00:00.625)       0:08:36.434 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cinder : Append Helm values (Staffeln)] **************",
                                "Sunday 01 March 2026  22:12:50 +0000 (0:00:00.469)       0:08:36.903 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.cinder : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:12:50 +0000 (0:00:00.404)       0:08:37.308 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:12:53 +0000 (0:00:02.502)       0:08:39.811 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:12:53 +0000 (0:00:00.172)       0:08:39.984 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:12:53 +0000 (0:00:00.088)       0:08:40.072 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:12:53 +0000 (0:00:00.081)       0:08:40.154 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress volumev3] *************************************************",
                                "Sunday 01 March 2026  22:12:53 +0000 (0:00:00.091)       0:08:40.245 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress volumev3] *******************",
                                "Sunday 01 March 2026  22:12:54 +0000 (0:00:00.145)       0:08:40.390 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:01.232)       0:08:41.623 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:00.116)       0:08:41.739 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:00.079)       0:08:41.819 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:00.085)       0:08:41.904 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:00.082)       0:08:41.986 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:00.096)       0:08:42.083 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:00.080)       0:08:42.164 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:12:55 +0000 (0:00:00.092)       0:08:42.256 **********",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=placement)",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:12:56 +0000 (0:00:00.130)       0:08:42.386 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:12:56 +0000 (0:00:00.128)       0:08:42.514 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:12:56 +0000 (0:00:00.116)       0:08:42.631 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.placement : Deploy Helm chart] ***********************",
                                "Sunday 01 March 2026  22:12:56 +0000 (0:00:00.616)       0:08:43.248 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:12:59 +0000 (0:00:02.293)       0:08:45.541 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:12:59 +0000 (0:00:00.165)       0:08:45.706 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:12:59 +0000 (0:00:00.082)       0:08:45.789 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:12:59 +0000 (0:00:00.093)       0:08:45.883 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress placement] ************************************************",
                                "Sunday 01 March 2026  22:12:59 +0000 (0:00:00.101)       0:08:45.984 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress placement] ******************",
                                "Sunday 01 March 2026  22:12:59 +0000 (0:00:00.120)       0:08:46.104 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Configure operating system] **********************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:13:00 +0000 (0:00:01.151)       0:08:47.255 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Detect if the \"lpfc\" module is loaded] ********",
                                "Sunday 01 March 2026  22:13:04 +0000 (0:00:04.076)       0:08:51.332 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Install the configuration file] ***************",
                                "Sunday 01 March 2026  22:13:05 +0000 (0:00:00.241)       0:08:51.574 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Get the values for the module parameters] *****",
                                "Sunday 01 March 2026  22:13:05 +0000 (0:00:00.078)       0:08:51.653 **********",
                                "skipping: [instance] => (item=lpfc_lun_queue_depth)",
                                "skipping: [instance] => (item=lpfc_sg_seg_cnt)",
                                "skipping: [instance] => (item=lpfc_max_luns)",
                                "skipping: [instance] => (item=lpfc_enable_fc4_type)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Detect if the run-time module parameters are set correctly] ***",
                                "Sunday 01 March 2026  22:13:05 +0000 (0:00:00.086)       0:08:51.740 **********",
                                "skipping: [instance] => (item=lpfc_lun_queue_depth)",
                                "skipping: [instance] => (item=lpfc_sg_seg_cnt)",
                                "skipping: [instance] => (item=lpfc_max_luns)",
                                "skipping: [instance] => (item=lpfc_enable_fc4_type)",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.lpfc : Update \"initramfs\" if the configuration file has changed] ***",
                                "Sunday 01 March 2026  22:13:05 +0000 (0:00:00.088)       0:08:51.828 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Reboot the system if the configuration file has changed] *****************",
                                "Sunday 01 March 2026  22:13:05 +0000 (0:00:00.085)       0:08:51.914 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Add backports PPA] **********************",
                                "Sunday 01 March 2026  22:13:05 +0000 (0:00:00.099)       0:08:52.013 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the multipathd package] *********",
                                "Sunday 01 March 2026  22:13:06 +0000 (0:00:00.386)       0:08:52.400 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.multipathd : Install the configuration file] *********",
                                "Sunday 01 March 2026  22:13:07 +0000 (0:00:01.319)       0:08:53.719 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.iscsi : Ensure iscsid is started] ********************",
                                "Sunday 01 March 2026  22:13:07 +0000 (0:00:00.569)       0:08:54.288 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.udev : Add udev rules for Pure Storage FlashArray] ***",
                                "Sunday 01 March 2026  22:13:08 +0000 (0:00:00.458)       0:08:54.746 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.udev : Add udev rules for SCSI Unit Attention] *******",
                                "Sunday 01 March 2026  22:13:08 +0000 (0:00:00.483)       0:08:55.230 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Deploy SDN] **************************************************************",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:13:09 +0000 (0:00:00.624)       0:08:55.855 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:13:09 +0000 (0:00:00.098)       0:08:55.954 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:13:09 +0000 (0:00:00.071)       0:08:56.025 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:13:09 +0000 (0:00:00.077)       0:08:56.102 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:13:09 +0000 (0:00:00.072)       0:08:56.175 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:13:09 +0000 (0:00:00.078)       0:08:56.253 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:09 +0000 (0:00:00.063)       0:08:56.317 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:10 +0000 (0:00:00.077)       0:08:56.394 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:13:10 +0000 (0:00:00.079)       0:08:56.474 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:13:10 +0000 (0:00:00.137)       0:08:56.611 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:13:10 +0000 (0:00:00.114)       0:08:56.726 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Get the current status of all systemd values for containerd] ***",
                                "Sunday 01 March 2026  22:13:10 +0000 (0:00:00.608)       0:08:57.334 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Assert that LimitMEMLOCK is set to infinity] ***",
                                "Sunday 01 March 2026  22:13:11 +0000 (0:00:00.270)       0:08:57.605 **********",
                                "ok: [instance] => {",
                                "    \"changed\": false,",
                                "    \"msg\": \"All assertions passed\"",
                                "}",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Pull openvswitch image] ****************",
                                "Sunday 01 March 2026  22:13:11 +0000 (0:00:00.071)       0:08:57.676 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Verify openvswitch image pull] *********",
                                "Sunday 01 March 2026  22:13:11 +0000 (0:00:00.528)       0:08:58.205 **********",
                                "ok: [instance] => {",
                                "    \"changed\": false,",
                                "    \"msg\": \"Successfully pulled openvswitch image\"",
                                "}",
                                "",
                                "TASK [vexxhost.atmosphere.openvswitch : Deploy Helm chart] *********************",
                                "Sunday 01 March 2026  22:13:11 +0000 (0:00:00.072)       0:08:58.278 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:13:13 +0000 (0:00:01.731)       0:09:00.009 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:13:13 +0000 (0:00:00.302)       0:09:00.312 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Check if ovn_controller DaemonSet exists] ******",
                                "Sunday 01 March 2026  22:13:14 +0000 (0:00:00.617)       0:09:00.929 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Delete existing ovn controller DaemonSet if type label is found] ***",
                                "Sunday 01 March 2026  22:13:15 +0000 (0:00:00.703)       0:09:01.633 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Pull ovn-controller image] *********************",
                                "Sunday 01 March 2026  22:13:15 +0000 (0:00:00.084)       0:09:01.717 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Verify ovn-controller image pull] **************",
                                "Sunday 01 March 2026  22:13:15 +0000 (0:00:00.531)       0:09:02.249 **********",
                                "ok: [instance] => {",
                                "    \"changed\": false,",
                                "    \"msg\": \"Successfully pulled ovn-controller image\"",
                                "}",
                                "",
                                "TASK [vexxhost.atmosphere.ovn : Deploy Helm chart] *****************************",
                                "Sunday 01 March 2026  22:13:15 +0000 (0:00:00.083)       0:09:02.333 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [controllers[0]] **********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:13:18 +0000 (0:00:02.081)       0:09:04.414 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:03.979)       0:09:08.394 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.099)       0:09:08.493 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.064)       0:09:08.558 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.078)       0:09:08.636 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.193)       0:09:08.829 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.075)       0:09:08.905 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.096)       0:09:09.001 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.094)       0:09:09.096 **********",
                                "ok: [instance] => (item=libvirt_exporter)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.093)       0:09:09.189 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:13:22 +0000 (0:00:00.137)       0:09:09.327 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:13:23 +0000 (0:00:00.137)       0:09:09.465 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.libvirt : Create CA certificates] ********************",
                                "Sunday 01 March 2026  22:13:23 +0000 (0:00:00.633)       0:09:10.098 **********",
                                "ok: [instance] => (item=libvirt-vnc)",
                                "ok: [instance] => (item=libvirt-api)",
                                "",
                                "TASK [vexxhost.atmosphere.libvirt : Create Issuers] ****************************",
                                "Sunday 01 March 2026  22:13:25 +0000 (0:00:01.405)       0:09:11.504 **********",
                                "ok: [instance] => (item=libvirt-vnc)",
                                "ok: [instance] => (item=libvirt-api)",
                                "",
                                "TASK [vexxhost.atmosphere.libvirt : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  22:13:26 +0000 (0:00:01.401)       0:09:12.905 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:13:28 +0000 (0:00:02.045)       0:09:14.950 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:13:28 +0000 (0:00:00.113)       0:09:15.064 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.coredns : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  22:13:29 +0000 (0:00:00.583)       0:09:15.647 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************",
                                "Sunday 01 March 2026  22:13:29 +0000 (0:00:00.162)       0:09:15.810 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****",
                                "Sunday 01 March 2026  22:13:30 +0000 (0:00:00.993)       0:09:16.803 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********",
                                "Sunday 01 March 2026  22:13:30 +0000 (0:00:00.248)       0:09:17.052 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:13:31 +0000 (0:00:00.607)       0:09:17.660 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:13:31 +0000 (0:00:00.280)       0:09:17.940 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:13:31 +0000 (0:00:00.267)       0:09:18.208 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:13:32 +0000 (0:00:00.721)       0:09:18.929 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:13:32 +0000 (0:00:00.205)       0:09:19.135 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:13:33 +0000 (0:00:00.216)       0:09:19.351 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:13:33 +0000 (0:00:00.180)       0:09:19.532 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:13:34 +0000 (0:00:00.981)       0:09:20.513 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:13:34 +0000 (0:00:00.696)       0:09:21.209 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:13:34 +0000 (0:00:00.089)       0:09:21.298 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:13:35 +0000 (0:00:00.082)       0:09:21.381 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:35 +0000 (0:00:00.080)       0:09:21.462 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:35 +0000 (0:00:00.074)       0:09:21.536 **********",
                                "ok: [instance] => (item=volumev3)",
                                "ok: [instance] => (item=baremetal)",
                                "ok: [instance] => (item=compute_novnc_proxy)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=placement)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=compute)",
                                "ok: [instance] => (item=oslo_db_api)",
                                "ok: [instance] => (item=image)",
                                "ok: [instance] => (item=network)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=compute_metadata)",
                                "ok: [instance] => (item=oslo_db_cell0)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:13:35 +0000 (0:00:00.260)       0:09:21.797 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:13:35 +0000 (0:00:00.126)       0:09:21.923 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:13:35 +0000 (0:00:00.129)       0:09:22.053 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Generate temporary file for SSH public key] ***",
                                "Sunday 01 March 2026  22:13:36 +0000 (0:00:00.636)       0:09:22.689 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Write contents of current private SSH key] ****",
                                "Sunday 01 March 2026  22:13:36 +0000 (0:00:00.434)       0:09:23.123 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Generate public key for SSH private key] ******",
                                "Sunday 01 March 2026  22:13:37 +0000 (0:00:00.648)       0:09:23.772 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Delete temporary file for public SSH key] *****",
                                "Sunday 01 March 2026  22:13:37 +0000 (0:00:00.546)       0:09:24.318 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Remove nova-bootstrap and nova-cell-setup job] ***",
                                "Sunday 01 March 2026  22:13:38 +0000 (0:00:00.230)       0:09:24.549 **********",
                                "ok: [instance] => (item=nova-bootstrap)",
                                "ok: [instance] => (item=nova-cell-setup)",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Deploy Helm chart] ****************************",
                                "Sunday 01 March 2026  22:13:39 +0000 (0:00:01.428)       0:09:25.978 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:13:43 +0000 (0:00:03.751)       0:09:29.730 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:13:43 +0000 (0:00:00.404)       0:09:30.135 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:13:43 +0000 (0:00:00.080)       0:09:30.215 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:13:43 +0000 (0:00:00.083)       0:09:30.299 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress compute] **************************************************",
                                "Sunday 01 March 2026  22:13:44 +0000 (0:00:00.098)       0:09:30.398 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress compute] ********************",
                                "Sunday 01 March 2026  22:13:44 +0000 (0:00:00.127)       0:09:30.526 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:13:45 +0000 (0:00:01.188)       0:09:31.714 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:13:45 +0000 (0:00:00.180)       0:09:31.894 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:13:45 +0000 (0:00:00.085)       0:09:31.980 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:13:45 +0000 (0:00:00.089)       0:09:32.069 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress compute-novnc-proxy] **************************************",
                                "Sunday 01 March 2026  22:13:45 +0000 (0:00:00.105)       0:09:32.174 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress compute-novnc-proxy] ********",
                                "Sunday 01 March 2026  22:13:45 +0000 (0:00:00.130)       0:09:32.305 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Wait until compute api service ready] *********",
                                "Sunday 01 March 2026  22:13:47 +0000 (0:00:01.360)       0:09:33.665 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.nova : Create flavors] *******************************",
                                "Sunday 01 March 2026  22:13:48 +0000 (0:00:00.802)       0:09:34.468 **********",
                                "ok: [instance] => (item={'disk': 1, 'name': 'm1.tiny', 'ram': 512, 'vcpus': 1})",
                                "ok: [instance] => (item={'disk': 20, 'name': 'm1.small', 'ram': 2048, 'vcpus': 1})",
                                "ok: [instance] => (item={'disk': 40, 'name': 'm1.medium', 'ram': 4096, 'vcpus': 2})",
                                "ok: [instance] => (item={'disk': 80, 'name': 'm1.large', 'ram': 8192, 'vcpus': 4})",
                                "ok: [instance] => (item={'disk': 160, 'name': 'm1.xlarge', 'ram': 16384, 'vcpus': 8})",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:13:53 +0000 (0:00:05.473)       0:09:39.941 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:13:53 +0000 (0:00:00.307)       0:09:40.249 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:13:54 +0000 (0:00:00.137)       0:09:40.386 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:13:54 +0000 (0:00:00.737)       0:09:41.123 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:13:55 +0000 (0:00:00.374)       0:09:41.497 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:13:55 +0000 (0:00:00.401)       0:09:41.899 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:13:55 +0000 (0:00:00.200)       0:09:42.099 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:13:57 +0000 (0:00:01.264)       0:09:43.363 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:13:57 +0000 (0:00:00.752)       0:09:44.116 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:13:57 +0000 (0:00:00.085)       0:09:44.201 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:13:57 +0000 (0:00:00.088)       0:09:44.289 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:58 +0000 (0:00:00.085)       0:09:44.375 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:13:58 +0000 (0:00:00.085)       0:09:44.461 **********",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=compute)",
                                "ok: [instance] => (item=compute_metadata)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=network)",
                                "ok: [instance] => (item=dns)",
                                "ok: [instance] => (item=baremetal)",
                                "ok: [instance] => (item=load_balancer)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:13:58 +0000 (0:00:00.190)       0:09:44.652 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:13:58 +0000 (0:00:00.128)       0:09:44.780 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:13:58 +0000 (0:00:00.123)       0:09:44.904 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Set external_dns_driver] *******************",
                                "Sunday 01 March 2026  22:13:59 +0000 (0:00:00.634)       0:09:45.539 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Generate Helm values] **********************",
                                "Sunday 01 March 2026  22:13:59 +0000 (0:00:00.082)       0:09:45.621 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Append Helm values] ************************",
                                "Sunday 01 March 2026  22:13:59 +0000 (0:00:00.348)       0:09:45.970 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Append Helm values (neutron_policy_server)] ***",
                                "Sunday 01 March 2026  22:13:59 +0000 (0:00:00.100)       0:09:46.070 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  22:13:59 +0000 (0:00:00.103)       0:09:46.174 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:14:02 +0000 (0:00:02.430)       0:09:48.604 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:14:02 +0000 (0:00:00.184)       0:09:48.789 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:14:02 +0000 (0:00:00.083)       0:09:48.872 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:14:02 +0000 (0:00:00.095)       0:09:48.968 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress network] **************************************************",
                                "Sunday 01 March 2026  22:14:02 +0000 (0:00:00.096)       0:09:49.064 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress network] ********************",
                                "Sunday 01 March 2026  22:14:02 +0000 (0:00:00.134)       0:09:49.199 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Wait until network service ready] **********",
                                "Sunday 01 March 2026  22:14:04 +0000 (0:00:01.257)       0:09:50.457 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Create networks] ***************************",
                                "Sunday 01 March 2026  22:14:05 +0000 (0:00:00.891)       0:09:51.348 **********",
                                "ok: [instance] => (item={'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True, 'subnets': [{'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}]})",
                                "",
                                "TASK [vexxhost.atmosphere.neutron : Create subnets] ****************************",
                                "Sunday 01 March 2026  22:14:06 +0000 (0:00:01.130)       0:09:52.479 **********",
                                "ok: [instance] => (item=[{'external': True, 'mtu_size': 1500, 'name': 'public', 'port_security_enabled': True, 'provider_network_type': 'flat', 'provider_physical_network': 'external', 'shared': True}, {'allocation_pool_end': '10.96.250.220', 'allocation_pool_start': '10.96.250.200', 'cidr': '10.96.250.0/24', 'enable_dhcp': True, 'gateway_ip': '10.96.250.10', 'name': 'public-subnet'}])",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove OpenStack endpoints] *****************",
                                "Sunday 01 March 2026  22:14:07 +0000 (0:00:01.275)       0:09:53.754 **********",
                                "ok: [instance] => (item=public)",
                                "ok: [instance] => (item=admin)",
                                "ok: [instance] => (item=internal)",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove OpenStack service] *******************",
                                "Sunday 01 March 2026  22:14:10 +0000 (0:00:02.649)       0:09:56.404 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove Ingress] *****************************",
                                "Sunday 01 March 2026  22:14:10 +0000 (0:00:00.897)       0:09:57.301 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove Helm chart] **************************",
                                "Sunday 01 March 2026  22:14:11 +0000 (0:00:00.756)       0:09:58.058 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.senlin : Remove OpenStack user] **********************",
                                "Sunday 01 March 2026  22:14:12 +0000 (0:00:00.626)       0:09:58.684 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:14:13 +0000 (0:00:01.245)       0:09:59.929 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:14:13 +0000 (0:00:00.112)       0:10:00.041 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:14:13 +0000 (0:00:00.149)       0:10:00.190 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:14:14 +0000 (0:00:00.737)       0:10:00.928 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:14:14 +0000 (0:00:00.179)       0:10:01.107 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:14:14 +0000 (0:00:00.178)       0:10:01.285 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:14:15 +0000 (0:00:00.178)       0:10:01.464 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:14:16 +0000 (0:00:00.924)       0:10:02.388 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:14:16 +0000 (0:00:00.721)       0:10:03.110 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:14:16 +0000 (0:00:00.088)       0:10:03.198 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:14:16 +0000 (0:00:00.086)       0:10:03.285 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:14:17 +0000 (0:00:00.095)       0:10:03.380 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:14:17 +0000 (0:00:00.089)       0:10:03.469 **********",
                                "ok: [instance] => (item=orchestration)",
                                "ok: [instance] => (item=cloudformation)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:14:17 +0000 (0:00:00.139)       0:10:03.609 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:14:17 +0000 (0:00:00.113)       0:10:03.722 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:14:17 +0000 (0:00:00.120)       0:10:03.843 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.heat : Deploy Helm chart] ****************************",
                                "Sunday 01 March 2026  22:14:18 +0000 (0:00:00.648)       0:10:04.491 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:14:20 +0000 (0:00:02.788)       0:10:07.280 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:14:21 +0000 (0:00:00.275)       0:10:07.555 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:14:21 +0000 (0:00:00.072)       0:10:07.628 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:14:21 +0000 (0:00:00.098)       0:10:07.726 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress orchestration] ********************************************",
                                "Sunday 01 March 2026  22:14:21 +0000 (0:00:00.093)       0:10:07.819 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress orchestration] **************",
                                "Sunday 01 March 2026  22:14:21 +0000 (0:00:00.132)       0:10:07.952 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:14:22 +0000 (0:00:01.117)       0:10:09.069 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:14:22 +0000 (0:00:00.170)       0:10:09.240 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:14:22 +0000 (0:00:00.074)       0:10:09.314 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:14:23 +0000 (0:00:00.094)       0:10:09.409 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress cloudformation] *******************************************",
                                "Sunday 01 March 2026  22:14:23 +0000 (0:00:00.090)       0:10:09.500 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress cloudformation] *************",
                                "Sunday 01 March 2026  22:14:23 +0000 (0:00:00.127)       0:10:09.627 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********",
                                "Sunday 01 March 2026  22:14:24 +0000 (0:00:01.379)       0:10:11.007 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:14:24 +0000 (0:00:00.305)       0:10:11.313 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:14:26 +0000 (0:00:01.293)       0:10:12.606 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:14:26 +0000 (0:00:00.324)       0:10:12.931 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:14:26 +0000 (0:00:00.129)       0:10:13.060 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:14:27 +0000 (0:00:00.582)       0:10:13.643 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************",
                                "Sunday 01 March 2026  22:14:28 +0000 (0:00:01.399)       0:10:15.043 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***",
                                "Sunday 01 March 2026  22:14:29 +0000 (0:00:00.513)       0:10:15.556 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***",
                                "Sunday 01 March 2026  22:14:30 +0000 (0:00:00.821)       0:10:16.378 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***",
                                "Sunday 01 March 2026  22:14:30 +0000 (0:00:00.781)       0:10:17.159 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Generate OpenStack-Helm endpoints] ***************************************",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.410)       0:10:17.570 **********",
                                "included: openstack_helm_endpoints for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.201)       0:10:17.772 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.100)       0:10:17.872 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.088)       0:10:17.960 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.088)       0:10:18.049 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.079)       0:10:18.129 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.088)       0:10:18.217 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:14:31 +0000 (0:00:00.089)       0:10:18.307 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:14:32 +0000 (0:00:00.090)       0:10:18.397 **********",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:14:32 +0000 (0:00:00.090)       0:10:18.487 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************",
                                "Sunday 01 March 2026  22:14:32 +0000 (0:00:00.117)       0:10:18.605 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********",
                                "Sunday 01 March 2026  22:14:32 +0000 (0:00:00.506)       0:10:19.111 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:14:33 +0000 (0:00:00.631)       0:10:19.742 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:14:33 +0000 (0:00:00.119)       0:10:19.862 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:14:33 +0000 (0:00:00.139)       0:10:20.002 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:14:34 +0000 (0:00:00.799)       0:10:20.801 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:14:34 +0000 (0:00:00.209)       0:10:21.011 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:14:34 +0000 (0:00:00.202)       0:10:21.213 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:14:35 +0000 (0:00:00.384)       0:10:21.597 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:14:36 +0000 (0:00:01.230)       0:10:22.828 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:14:37 +0000 (0:00:00.782)       0:10:23.610 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:14:37 +0000 (0:00:00.094)       0:10:23.705 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:14:37 +0000 (0:00:00.094)       0:10:23.799 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:14:37 +0000 (0:00:00.081)       0:10:23.881 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:14:37 +0000 (0:00:00.070)       0:10:23.951 **********",
                                "ok: [instance] => (item=valkey)",
                                "ok: [instance] => (item=oslo_db_persistence)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=network)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "ok: [instance] => (item=load_balancer)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:14:37 +0000 (0:00:00.168)       0:10:24.120 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:14:37 +0000 (0:00:00.121)       0:10:24.241 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:14:38 +0000 (0:00:00.119)       0:10:24.360 **********",
                                "ok: [instance]",
                                "",
                                "TASK [atmosphere.common.secretgen_controller : Deploy secretgen-controller] ****",
                                "Sunday 01 March 2026  22:14:38 +0000 (0:00:00.630)       0:10:24.991 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create management network] *****************",
                                "Sunday 01 March 2026  22:14:40 +0000 (0:00:01.502)       0:10:26.494 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create management subnet] ******************",
                                "Sunday 01 March 2026  22:14:41 +0000 (0:00:01.045)       0:10:27.540 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create health manager security group] ******",
                                "Sunday 01 March 2026  22:14:42 +0000 (0:00:01.058)       0:10:28.598 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create health manager security group rules] ***",
                                "Sunday 01 March 2026  22:14:43 +0000 (0:00:01.281)       0:10:29.880 **********",
                                "ok: [instance] => (item={'protocol': 'udp', 'port': 5555})",
                                "ok: [instance] => (item={'protocol': 'udp', 'port': 10514})",
                                "ok: [instance] => (item={'protocol': 'udp', 'port': 20514})",
                                "ok: [instance] => (item={'protocol': 'tcp', 'port': 10514})",
                                "ok: [instance] => (item={'protocol': 'tcp', 'port': 20514})",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create health manager networking ports] ****",
                                "Sunday 01 March 2026  22:14:48 +0000 (0:00:05.241)       0:10:35.122 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Discover facts for other controllers] ******",
                                "Sunday 01 March 2026  22:14:50 +0000 (0:00:01.444)       0:10:36.567 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Set binding for ports] *********************",
                                "Sunday 01 March 2026  22:14:53 +0000 (0:00:03.177)       0:10:39.744 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Get health manager networking ports] *******",
                                "Sunday 01 March 2026  22:14:57 +0000 (0:00:03.892)       0:10:43.636 **********",
                                "ok: [instance] => (item=instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Set controller_ip_port_list] ***************",
                                "Sunday 01 March 2026  22:14:58 +0000 (0:00:01.165)       0:10:44.802 **********",
                                "ok: [instance] => (item=octavia-health-manager-port-instance)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create amphora security group] *************",
                                "Sunday 01 March 2026  22:14:58 +0000 (0:00:00.110)       0:10:44.912 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create amphora security group rules] *******",
                                "Sunday 01 March 2026  22:14:59 +0000 (0:00:00.948)       0:10:45.860 **********",
                                "ok: [instance] => (item=[22, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 'numa_affinity_policy': None, 'project_id': 'b6357918f73b458f9e03a5667d465a0e', 'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'ACTIVE', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:53:57Z', 'revision_number': 4, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': None}}, 'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])",
                                "ok: [instance] => (item=[9443, {'changed': False, 'ports': [{'allowed_address_pairs': [], 'binding_host_id': 'instance', 'binding_profile': {}, 'binding_vif_details': {'port_filter': True, 'connectivity': 'l2', 'bridge_name': 'br-int', 'datapath_type': 'system', 'bound_drivers': {'0': 'ovn'}}, 'binding_vif_type': 'ovs', 'binding_vnic_type': 'normal', 'created_at': '2026-03-01T21:50:27Z', 'data_plane_status': None, 'description': '', 'device_id': '', 'device_owner': 'octavia:health-mgr', 'device_profile': None, 'dns_assignment': [{'ip_address': '172.24.2.21', 'hostname': 'host-172-24-2-21', 'fqdn': 'host-172-24-2-21.openstacklocal.'}], 'dns_domain': '', 'dns_name': '', 'extra_dhcp_opts': [], 'fixed_ips': [{'subnet_id': '64e18f24-c519-4c88-a50c-38ef854b39a1', 'ip_address': '172.24.2.21'}], 'hardware_offload_type': None, 'ip_allocation': 'immediate', 'is_admin_state_up': True, 'is_port_security_enabled': True, 'mac_address': 'fa:16:3e:3d:ac:d9', 'name': 'octavia-health-manager-port-instance', 'network_id': 'bb482a7a-c46c-457a-a186-b44bfb1280f8', 'numa_affinity_policy': None, 'project_id': 'b6357918f73b458f9e03a5667d465a0e', 'tenant_id': 'b6357918f73b458f9e03a5667d465a0e', 'propagate_uplink_status': None, 'qos_network_policy_id': None, 'qos_policy_id': None, 'resource_request': None, 'security_group_ids': ['5ab21866-8cda-4495-bce8-e3077a52ee5a'], 'status': 'ACTIVE', 'trunk_details': None, 'trusted': None, 'updated_at': '2026-03-01T21:53:57Z', 'revision_number': 4, 'if_match': None, 'id': '0b032e00-fdd0-4807-8cd5-933cf322bf30', 'tags': []}], 'invocation': {'module_args': {'port': 'octavia-health-manager-port-instance', 'name': 'octavia-health-manager-port-instance', 'wait': True, 'timeout': 180, 'interface': 'public', 'sdk_log_level': 'INFO', 'auth_type': None, 'auth': None, 'region_name': None, 'validate_certs': None, 'ca_cert': None, 'client_cert': None, 'client_key': None, 'api_timeout': None, 'sdk_log_path': None, 'filters': None}}, 'failed': False, 'item': 'instance', 'ansible_loop_var': 'item'}])",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create amphora flavor] *********************",
                                "Sunday 01 March 2026  22:15:01 +0000 (0:00:02.143)       0:10:48.003 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Upload Amphora image] ****************************************************",
                                "Sunday 01 March 2026  22:15:02 +0000 (0:00:01.028)       0:10:49.032 **********",
                                "included: glance_image for instance",
                                "",
                                "TASK [vexxhost.atmosphere.qemu_utils : Install packages] ***********************",
                                "Sunday 01 March 2026  22:15:02 +0000 (0:00:00.212)       0:10:49.245 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  22:15:04 +0000 (0:00:01.432)       0:10:50.678 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:01.017)       0:10:51.695 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:00.086)       0:10:51.781 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:00.085)       0:10:51.867 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:00.081)       0:10:51.949 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:00.083)       0:10:52.033 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:00.075)       0:10:52.108 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:00.078)       0:10:52.186 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Get Amphora image information] *************",
                                "Sunday 01 March 2026  22:15:05 +0000 (0:00:00.124)       0:10:52.311 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create Amphora SSH key] ********************",
                                "Sunday 01 March 2026  22:15:06 +0000 (0:00:01.007)       0:10:53.319 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Grab generated Amphora public key] *********",
                                "Sunday 01 March 2026  22:15:07 +0000 (0:00:00.807)       0:10:54.127 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Import Amphora SSH key-pair in OpenStack] ***",
                                "Sunday 01 March 2026  22:15:08 +0000 (0:00:00.690)       0:10:54.817 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create CAs & Issuers] **********************",
                                "Sunday 01 March 2026  22:15:09 +0000 (0:00:01.279)       0:10:56.096 **********",
                                "ok: [instance] => (item=octavia-client)",
                                "ok: [instance] => (item=octavia-server)",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create certificate for Octavia clients] ****",
                                "Sunday 01 March 2026  22:15:11 +0000 (0:00:01.484)       0:10:57.581 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Create admin compute quotaset] *************",
                                "Sunday 01 March 2026  22:15:12 +0000 (0:00:01.117)       0:10:58.699 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  22:15:13 +0000 (0:00:01.490)       0:11:00.189 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.octavia : Add implied roles] *************************",
                                "Sunday 01 March 2026  22:15:16 +0000 (0:00:02.376)       0:11:02.566 **********",
                                "ok: [instance] => (item={'role': 'member', 'implies': 'load-balancer_member'})",
                                "ok: [instance] => (item={'role': 'reader', 'implies': 'load-balancer_observer'})",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:15:23 +0000 (0:00:07.617)       0:11:10.184 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:15:24 +0000 (0:00:00.195)       0:11:10.379 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:15:24 +0000 (0:00:00.097)       0:11:10.477 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:15:24 +0000 (0:00:00.077)       0:11:10.554 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress load-balancer] ********************************************",
                                "Sunday 01 March 2026  22:15:24 +0000 (0:00:00.093)       0:11:10.648 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress load-balancer] **************",
                                "Sunday 01 March 2026  22:15:24 +0000 (0:00:00.129)       0:11:10.777 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:15:25 +0000 (0:00:01.266)       0:11:12.044 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:15:25 +0000 (0:00:00.121)       0:11:12.166 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:15:25 +0000 (0:00:00.142)       0:11:12.308 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:15:26 +0000 (0:00:00.726)       0:11:13.035 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:15:26 +0000 (0:00:00.204)       0:11:13.239 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:15:27 +0000 (0:00:00.219)       0:11:13.458 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:15:27 +0000 (0:00:00.201)       0:11:13.660 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:15:28 +0000 (0:00:00.874)       0:11:14.534 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:15:28 +0000 (0:00:00.737)       0:11:15.272 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:15:29 +0000 (0:00:00.085)       0:11:15.357 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:15:29 +0000 (0:00:00.097)       0:11:15.455 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:15:29 +0000 (0:00:00.073)       0:11:15.528 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:15:29 +0000 (0:00:00.214)       0:11:15.742 **********",
                                "ok: [instance] => (item=orchestration)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=key_manager)",
                                "ok: [instance] => (item=container_infra)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:15:29 +0000 (0:00:00.173)       0:11:15.916 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:15:29 +0000 (0:00:00.121)       0:11:16.037 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:15:29 +0000 (0:00:00.120)       0:11:16.158 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:15:30 +0000 (0:00:00.626)       0:11:16.784 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/kubernetes-sigs/cluster-api/releases/download/v1.10.5/clusterctl-linux-amd64\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:15:30 +0000 (0:00:00.098)       0:11:16.882 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:15:30 +0000 (0:00:00.425)       0:11:17.308 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.clusterctl : Create a configuration file] ************",
                                "Sunday 01 March 2026  22:15:31 +0000 (0:00:00.121)       0:11:17.430 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:15:31 +0000 (0:00:00.634)       0:11:18.064 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:15:31 +0000 (0:00:00.143)       0:11:18.207 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Create build directory] ***",
                                "Sunday 01 March 2026  22:15:32 +0000 (0:00:00.618)       0:11:18.826 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Upload Kustomization] ***",
                                "Sunday 01 March 2026  22:15:32 +0000 (0:00:00.255)       0:11:19.081 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Generate manifests] ***",
                                "Sunday 01 March 2026  22:15:33 +0000 (0:00:00.568)       0:11:19.649 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.openstack_resource_controller : Apply manifest to cluster] ***",
                                "Sunday 01 March 2026  22:15:34 +0000 (0:00:01.030)       0:11:20.680 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Create a folder for the Cluster API providers] ***",
                                "Sunday 01 March 2026  22:15:37 +0000 (0:00:02.729)       0:11:23.409 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Copy over all provider configuration to the remote system] ***",
                                "Sunday 01 March 2026  22:15:37 +0000 (0:00:00.262)       0:11:23.672 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Get a list of all Cluster API providers] ***",
                                "Sunday 01 March 2026  22:15:39 +0000 (0:00:02.110)       0:11:25.782 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Initialize the management cluster] *****",
                                "Sunday 01 March 2026  22:15:40 +0000 (0:00:00.727)       0:11:26.509 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Parse provider resources into version mapping] ***",
                                "Sunday 01 March 2026  22:15:40 +0000 (0:00:00.073)       0:11:26.583 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Run upgrade if required] ***************",
                                "Sunday 01 March 2026  22:15:40 +0000 (0:00:00.100)       0:11:26.683 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Set node selector for Cluster API components] ***",
                                "Sunday 01 March 2026  22:15:40 +0000 (0:00:00.077)       0:11:26.760 **********",
                                "ok: [instance] => (item={'namespace': 'capi-kubeadm-bootstrap-system', 'name': 'capi-kubeadm-bootstrap-controller-manager'})",
                                "ok: [instance] => (item={'namespace': 'capi-kubeadm-control-plane-system', 'name': 'capi-kubeadm-control-plane-controller-manager'})",
                                "ok: [instance] => (item={'namespace': 'capi-system', 'name': 'capi-controller-manager'})",
                                "ok: [instance] => (item={'namespace': 'capo-system', 'name': 'capo-controller-manager'})",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Set default values for imagePullPolicy in kubeadmConfigSpec of CRDs] ***",
                                "Sunday 01 March 2026  22:15:43 +0000 (0:00:02.879)       0:11:29.640 **********",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanetemplates.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/1/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigs.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmconfigtemplates.bootstrap.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/template/properties/spec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/initConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance] => (item={'crd': 'kubeadmcontrolplanes.controlplane.cluster.x-k8s.io', 'path': '/spec/versions/2/schema/openAPIV3Schema/properties/spec/properties/kubeadmConfigSpec/properties/joinConfiguration/properties/nodeRegistration/properties/imagePullPolicy/default'})",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.cluster_api : Set CAPO instance creation timeout] ****",
                                "Sunday 01 March 2026  22:15:43 +0000 (0:00:00.117)       0:11:29.757 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy Cluster API for Magnum RBAC] *********",
                                "Sunday 01 March 2026  22:15:44 +0000 (0:00:01.192)       0:11:30.950 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:15:45 +0000 (0:00:00.789)       0:11:31.740 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy \"magnum-cluster-api-proxy\"] **********",
                                "Sunday 01 March 2026  22:15:47 +0000 (0:00:02.268)       0:11:34.008 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:15:48 +0000 (0:00:00.797)       0:11:34.806 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:15:48 +0000 (0:00:00.170)       0:11:34.977 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:15:48 +0000 (0:00:00.092)       0:11:35.070 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:15:48 +0000 (0:00:00.089)       0:11:35.159 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress container-infra] ******************************************",
                                "Sunday 01 March 2026  22:15:48 +0000 (0:00:00.095)       0:11:35.254 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress container-infra] ************",
                                "Sunday 01 March 2026  22:15:49 +0000 (0:00:00.141)       0:11:35.395 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.magnum : Deploy magnum registry] *********************",
                                "Sunday 01 March 2026  22:15:50 +0000 (0:00:01.387)       0:11:36.783 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create magnum registry Ingress] ******************************************",
                                "Sunday 01 March 2026  22:15:51 +0000 (0:00:00.828)       0:11:37.612 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:15:51 +0000 (0:00:00.177)       0:11:37.789 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:15:51 +0000 (0:00:00.070)       0:11:37.860 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:15:51 +0000 (0:00:00.072)       0:11:37.932 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress container-infra-registry] *********************************",
                                "Sunday 01 March 2026  22:15:51 +0000 (0:00:00.088)       0:11:38.021 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress container-infra-registry] ***",
                                "Sunday 01 March 2026  22:15:51 +0000 (0:00:00.283)       0:11:38.304 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Upload images] ***********************************************************",
                                "Sunday 01 March 2026  22:15:53 +0000 (0:00:01.158)       0:11:39.463 **********",
                                "included: glance_image for instance => (item={'name': 'ubuntu-2204-kube-v1.34.3', 'url': 'https://github.com/vexxhost/capo-image-elements/releases/download/2025.12-3/ubuntu-22.04-v1.34.3.qcow2', 'distro': 'ubuntu'})",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  22:15:53 +0000 (0:00:00.226)       0:11:39.690 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  22:15:54 +0000 (0:00:00.944)       0:11:40.634 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  22:15:54 +0000 (0:00:00.074)       0:11:40.708 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  22:15:54 +0000 (0:00:00.098)       0:11:40.807 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  22:15:54 +0000 (0:00:00.076)       0:11:40.884 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  22:15:54 +0000 (0:00:00.082)       0:11:40.966 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  22:15:54 +0000 (0:00:00.079)       0:11:41.046 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  22:15:54 +0000 (0:00:00.091)       0:11:41.137 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:15:55 +0000 (0:00:00.243)       0:11:41.380 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:15:55 +0000 (0:00:00.126)       0:11:41.507 **********",
                                "included: rabbitmq for instance",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Verify state of current RabbitMQ cluster] ***",
                                "Sunday 01 March 2026  22:15:55 +0000 (0:00:00.130)       0:11:41.638 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Print diff of RabbitMQ cluster spec] ******",
                                "Sunday 01 March 2026  22:15:56 +0000 (0:00:00.701)       0:11:42.340 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Wait for confirmation of RabbitMQ cluster spec] ***",
                                "Sunday 01 March 2026  22:15:56 +0000 (0:00:00.176)       0:11:42.516 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Enable all feature flags for upgrade] *****",
                                "Sunday 01 March 2026  22:15:56 +0000 (0:00:00.181)       0:11:42.697 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.rabbitmq : Deploy cluster] ***************************",
                                "Sunday 01 March 2026  22:15:56 +0000 (0:00:00.204)       0:11:42.902 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:15:57 +0000 (0:00:00.859)       0:11:43.762 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.703)       0:11:44.465 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.080)       0:11:44.546 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.080)       0:11:44.626 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.074)       0:11:44.701 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.076)       0:11:44.778 **********",
                                "ok: [instance] => (item=sharev2)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=oslo_messaging)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.124)       0:11:44.902 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.288)       0:11:45.191 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:15:58 +0000 (0:00:00.106)       0:11:45.298 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create flavor] ******************************",
                                "Sunday 01 March 2026  22:15:59 +0000 (0:00:00.577)       0:11:45.875 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Upload service image] ****************************************************",
                                "Sunday 01 March 2026  22:16:00 +0000 (0:00:01.011)       0:11:46.887 **********",
                                "included: glance_image for instance",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Check if image exists] ****************",
                                "Sunday 01 March 2026  22:16:00 +0000 (0:00:00.189)       0:11:47.076 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Generate temporary work directory] ****",
                                "Sunday 01 March 2026  22:16:01 +0000 (0:00:00.943)       0:11:48.020 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Download image] ***********************",
                                "Sunday 01 March 2026  22:16:01 +0000 (0:00:00.078)       0:11:48.098 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Get image format] *********************",
                                "Sunday 01 March 2026  22:16:01 +0000 (0:00:00.088)       0:11:48.187 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Convert file to target disk format] ***",
                                "Sunday 01 March 2026  22:16:01 +0000 (0:00:00.077)       0:11:48.264 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Wait until image service ready] *******",
                                "Sunday 01 March 2026  22:16:02 +0000 (0:00:00.075)       0:11:48.340 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Upload image into Glance] *************",
                                "Sunday 01 March 2026  22:16:02 +0000 (0:00:00.082)       0:11:48.422 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.glance_image : Remove work directory] ****************",
                                "Sunday 01 March 2026  22:16:02 +0000 (0:00:00.076)       0:11:48.498 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create generic share driver security group] ***",
                                "Sunday 01 March 2026  22:16:02 +0000 (0:00:00.133)       0:11:48.631 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create generic share driver security group tcp rules] ***",
                                "Sunday 01 March 2026  22:16:03 +0000 (0:00:01.056)       0:11:49.688 **********",
                                "ok: [instance] => (item=22)",
                                "ok: [instance] => (item=111)",
                                "ok: [instance] => (item=2049)",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create generic share driver security group icmp rules] ***",
                                "Sunday 01 March 2026  22:16:06 +0000 (0:00:03.238)       0:11:52.926 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Generate temporary file for SSH public key] ***",
                                "Sunday 01 March 2026  22:16:07 +0000 (0:00:01.176)       0:11:54.102 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Write contents of current private SSH key] ***",
                                "Sunday 01 March 2026  22:16:07 +0000 (0:00:00.228)       0:11:54.331 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Generate public key for SSH private key] ****",
                                "Sunday 01 March 2026  22:16:08 +0000 (0:00:00.398)       0:11:54.729 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Delete temporary file for public SSH key] ***",
                                "Sunday 01 March 2026  22:16:08 +0000 (0:00:00.327)       0:11:55.057 **********",
                                "ok: [instance -> localhost]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Create secret with the SSH keys] ************",
                                "Sunday 01 March 2026  22:16:08 +0000 (0:00:00.223)       0:11:55.280 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Deploy Helm chart] **************************",
                                "Sunday 01 March 2026  22:16:09 +0000 (0:00:00.767)       0:11:56.048 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:16:11 +0000 (0:00:02.233)       0:11:58.282 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:16:12 +0000 (0:00:00.196)       0:11:58.479 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:16:12 +0000 (0:00:00.085)       0:11:58.564 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:16:12 +0000 (0:00:00.265)       0:11:58.829 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress sharev2] **************************************************",
                                "Sunday 01 March 2026  22:16:12 +0000 (0:00:00.085)       0:11:58.915 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress sharev2] ********************",
                                "Sunday 01 March 2026  22:16:12 +0000 (0:00:00.128)       0:11:59.043 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.manila : Update service tenant quotas] ***************",
                                "Sunday 01 March 2026  22:16:13 +0000 (0:00:01.146)       0:12:00.190 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:01.375)       0:12:01.565 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.102)       0:12:01.668 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.075)       0:12:01.743 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.079)       0:12:01.822 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.083)       0:12:01.906 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.082)       0:12:01.989 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.075)       0:12:02.064 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.069)       0:12:02.133 **********",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=dashboard)",
                                "ok: [instance] => (item=oslo_cache)",
                                "ok: [instance] => (item=oslo_db)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:16:15 +0000 (0:00:00.099)       0:12:02.233 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:16:16 +0000 (0:00:00.109)       0:12:02.342 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:16:16 +0000 (0:00:00.128)       0:12:02.471 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.horizon : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  22:16:16 +0000 (0:00:00.615)       0:12:03.086 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress] **********************************************************",
                                "Sunday 01 March 2026  22:16:18 +0000 (0:00:02.225)       0:12:05.311 **********",
                                "included: openstack_helm_ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Create certificate] *********",
                                "Sunday 01 March 2026  22:16:19 +0000 (0:00:00.176)       0:12:05.488 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Set fact with wildcard certificate] ***",
                                "Sunday 01 March 2026  22:16:19 +0000 (0:00:00.086)       0:12:05.575 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_ingress : Add ClusterIssuer annotations] ***",
                                "Sunday 01 March 2026  22:16:19 +0000 (0:00:00.086)       0:12:05.662 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create Ingress dashboard] ************************************************",
                                "Sunday 01 March 2026  22:16:19 +0000 (0:00:00.107)       0:12:05.770 **********",
                                "included: ingress for instance",
                                "",
                                "TASK [vexxhost.atmosphere.ingress : Create Ingress dashboard] ******************",
                                "Sunday 01 March 2026  22:16:19 +0000 (0:00:00.266)       0:12:06.037 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************",
                                "Sunday 01 March 2026  22:16:20 +0000 (0:00:01.186)       0:12:07.224 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Fetch Neutron DB secret] ********",
                                "Sunday 01 March 2026  22:16:21 +0000 (0:00:00.850)       0:12:08.074 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Fetch Nova DB secret] ***********",
                                "Sunday 01 March 2026  22:16:22 +0000 (0:00:00.695)       0:12:08.770 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Fetch Octavia DB secret] ********",
                                "Sunday 01 March 2026  22:16:23 +0000 (0:00:00.759)       0:12:09.529 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Create \"openstack-database-exporter-dsn\" secret] ***",
                                "Sunday 01 March 2026  22:16:23 +0000 (0:00:00.725)       0:12:10.255 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_exporter : Deploy service] *****************",
                                "Sunday 01 March 2026  22:16:24 +0000 (0:00:00.749)       0:12:11.004 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [controllers] *************************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:16:25 +0000 (0:00:00.846)       0:12:11.851 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/etc/nerdctl)] *********",
                                "Sunday 01 March 2026  22:16:29 +0000 (0:00:04.091)       0:12:15.942 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.package : Update state for tar] **********************",
                                "Sunday 01 March 2026  22:16:29 +0000 (0:00:00.306)       0:12:16.248 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.directory : Create directory (/var/lib/downloads)] ***",
                                "Sunday 01 March 2026  22:16:31 +0000 (0:00:01.405)       0:12:17.654 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Starting download of file] *******",
                                "Sunday 01 March 2026  22:16:31 +0000 (0:00:00.308)       0:12:17.963 **********",
                                "ok: [instance] => {",
                                "    \"msg\": \"https://github.com/containerd/nerdctl/releases/download/v2.2.0/nerdctl-2.2.0-linux-amd64.tar.gz\"",
                                "}",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Download item] *******************",
                                "Sunday 01 March 2026  22:16:31 +0000 (0:00:00.122)       0:12:18.085 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.download_artifact : Extract archive] *****************",
                                "Sunday 01 March 2026  22:16:32 +0000 (0:00:00.421)       0:12:18.507 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.containers.nerdctl : Create nerdctl config] *********************",
                                "Sunday 01 March 2026  22:16:33 +0000 (0:00:01.133)       0:12:19.640 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall OpenStack client system packages] ***",
                                "Sunday 01 March 2026  22:16:34 +0000 (0:00:00.725)       0:12:20.366 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Uninstall Ubuntu Cloud Archive keyring] ***",
                                "Sunday 01 March 2026  22:16:34 +0000 (0:00:00.805)       0:12:21.171 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Remove Ubuntu Cloud Archive repository] ***",
                                "Sunday 01 March 2026  22:16:35 +0000 (0:00:00.909)       0:12:22.081 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Generate OpenStack-Helm endpoints] ***************************************",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.438)       0:12:22.520 **********",
                                "included: openstack_helm_endpoints for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.167)       0:12:22.688 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.083)       0:12:22.771 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.074)       0:12:22.846 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.072)       0:12:22.918 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.074)       0:12:22.992 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.068)       0:12:23.061 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.070)       0:12:23.131 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.082)       0:12:23.214 **********",
                                "ok: [instance] => (item=identity)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:16:36 +0000 (0:00:00.101)       0:12:23.316 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openrc file] ****************",
                                "Sunday 01 March 2026  22:16:37 +0000 (0:00:00.111)       0:12:23.428 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_cli : Generate openstack aliases] **********",
                                "Sunday 01 March 2026  22:16:37 +0000 (0:00:00.525)       0:12:23.953 **********",
                                "ok: [instance]",
                                "",
                                "PLAY [Configure networking] ****************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:16:38 +0000 (0:00:00.550)       0:12:24.504 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Add IP address to \"br-ex\"] ***********************************************",
                                "Sunday 01 March 2026  22:16:42 +0000 (0:00:04.145)       0:12:28.650 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Set \"br-ex\" interface to \"up\"] *******************************************",
                                "Sunday 01 March 2026  22:16:42 +0000 (0:00:00.258)       0:12:28.908 **********",
                                "ok: [instance]",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=747  changed=0    unreachable=0    failed=0    skipped=277  rescued=0    ignored=0",
                                "",
                                "Sunday 01 March 2026  22:16:42 +0000 (0:00:00.229)       0:12:29.138 **********",
                                "===============================================================================",
                                "vexxhost.atmosphere.kube_prometheus_stack : Deploy additional dashboards -- 15.57s",
                                "vexxhost.atmosphere.kube_prometheus_stack : Deploy Helm chart ---------- 15.44s",
                                "vexxhost.atmosphere.rook_ceph_cluster : Collect \"ceph quorum_status\" output from a monitor -- 15.06s",
                                "vexxhost.atmosphere.ceph_provisioners : Collect \"ceph mon dump\" output from a monitor -- 14.93s",
                                "vexxhost.atmosphere.rook_ceph_cluster : Set mgr/cephadm/warn_on_stray_daemons to false -- 14.81s",
                                "vexxhost.ceph.osd : Get `ceph-volume lvm list` status ------------------ 10.98s",
                                "vexxhost.ceph.mon : Validate monitor exist ----------------------------- 10.87s",
                                "vexxhost.ceph.osd : Adopt OSDs to cluster ------------------------------ 10.09s",
                                "vexxhost.atmosphere.kube_prometheus_stack : Install all CRDs ------------ 8.40s",
                                "vexxhost.atmosphere.octavia : Add implied roles ------------------------- 7.62s",
                                "vexxhost.ceph.mon : Get `cephadm ls` status ----------------------------- 5.70s",
                                "vexxhost.ceph.osd : Get `cephadm ls` status ----------------------------- 5.57s",
                                "vexxhost.ceph.osd : Ensure all OSDs are non-legacy ---------------------- 5.50s",
                                "vexxhost.atmosphere.nova : Create flavors ------------------------------- 5.47s",
                                "vexxhost.atmosphere.octavia : Create health manager security group rules --- 5.24s",
                                "Gathering Facts --------------------------------------------------------- 5.16s",
                                "Gathering Facts --------------------------------------------------------- 5.12s",
                                "Gathering Facts --------------------------------------------------------- 5.00s",
                                "vexxhost.atmosphere.kube_prometheus_stack : Create Keycloak roles ------- 4.63s",
                                "Gathering Facts --------------------------------------------------------- 4.39s",
                                "INFO     [aio > idempotence] Executed: Successful",
                                "INFO     [aio > side_effect] Executing",
                                "WARNING  [aio > side_effect] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [aio > verify] Executing",
                                "",
                                "PLAY [Run tests] ***************************************************************",
                                "",
                                "TASK [Run \"stestr\" tests] ******************************************************",
                                "Sunday 01 March 2026  22:16:44 +0000 (0:00:00.009)       0:00:00.009 **********",
                                "changed: [localhost]",
                                "",
                                "PLAY [controllers[0]] **********************************************************",
                                "",
                                "TASK [Gathering Facts] *********************************************************",
                                "Sunday 01 March 2026  22:16:46 +0000 (0:00:02.878)       0:00:02.888 **********",
                                "[WARNING]: Platform linux on host instance is using the discovered Python",
                                "interpreter at /usr/bin/python3.10, but future installation of another Python",
                                "interpreter could change the meaning of that path. See",
                                "https://docs.ansible.com/ansible-",
                                "core/2.17/reference_appendices/interpreter_discovery.html for more information.",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Install openstacksdk] *****************",
                                "Sunday 01 March 2026  22:16:52 +0000 (0:00:05.311)       0:00:08.199 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Create openstack config directory] ****",
                                "Sunday 01 March 2026  22:16:53 +0000 (0:00:01.109)       0:00:09.309 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstacksdk : Generate cloud config file] ***********",
                                "Sunday 01 March 2026  22:16:53 +0000 (0:00:00.378)       0:00:09.687 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.652)       0:00:10.340 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.177)       0:00:10.518 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.073)       0:00:10.591 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.065)       0:00:10.657 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.061)       0:00:10.718 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.064)       0:00:10.783 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.061)       0:00:10.844 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:54 +0000 (0:00:00.074)       0:00:10.919 **********",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=dashboard)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:16:55 +0000 (0:00:00.082)       0:00:11.002 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks] ***",
                                "Sunday 01 March 2026  22:16:55 +0000 (0:00:00.100)       0:00:11.102 **********",
                                "included: /home/zuul/.ansible/collections/ansible_collections/vexxhost/kubernetes/roles/upload_helm_chart/tasks/synchronize.yml for instance",
                                "",
                                "TASK [vexxhost.kubernetes.upload_helm_chart : Upload Helm chart] ***************",
                                "Sunday 01 March 2026  22:16:55 +0000 (0:00:00.161)       0:00:11.264 **********",
                                "changed: [instance]",
                                "",
                                "TASK [Generate OpenStack-Helm endpoints] ***************************************",
                                "Sunday 01 March 2026  22:16:55 +0000 (0:00:00.647)       0:00:11.911 **********",
                                "included: openstack_helm_endpoints for instance",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints] ***",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.158)       0:00:12.070 **********",
                                "ok: [instance]",
                                "",
                                "TASK [Create RabbitMQ cluster] *************************************************",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.133)       0:00:12.203 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab RabbitMQ cluster secret] ***",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.069)       0:00:12.273 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with RabbitMQ cluster credentials] ***",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.069)       0:00:12.343 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Grab Percona XtraDB cluster secret] ***",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.061)       0:00:12.404 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Cache fact with Percona XtraDB password] ***",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.063)       0:00:12.467 **********",
                                "skipping: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Reset value for OpenStack_Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.071)       0:00:12.538 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints] ***",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.073)       0:00:12.612 **********",
                                "ok: [instance] => (item=identity)",
                                "ok: [instance] => (item=dashboard)",
                                "",
                                "TASK [vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts] ***********",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.100)       0:00:12.712 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Get test image object] *********************",
                                "Sunday 01 March 2026  22:16:56 +0000 (0:00:00.103)       0:00:12.816 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Configure test image ref] ******************",
                                "Sunday 01 March 2026  22:16:57 +0000 (0:00:01.127)       0:00:13.943 **********",
                                "ok: [instance] => (item={'key': 'conf', 'value': {'tempest': {'compute': {'image_ref': 'fe32d17b-dbe1-462c-bd98-f9a72c2d9bd0'}}}})",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Get test flavor object] ********************",
                                "Sunday 01 March 2026  22:16:58 +0000 (0:00:00.089)       0:00:14.032 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Set test flavor ref] ***********************",
                                "Sunday 01 March 2026  22:16:59 +0000 (0:00:01.156)       0:00:15.188 **********",
                                "ok: [instance] => (item={'key': 'conf', 'value': {'tempest': {'compute': {'flavor_ref': '0e6c2c28-f07e-4876-9011-42d2234ae20d'}}}})",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Get test network object] *******************",
                                "Sunday 01 March 2026  22:16:59 +0000 (0:00:00.095)       0:00:15.284 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Set test network ref] **********************",
                                "Sunday 01 March 2026  22:17:00 +0000 (0:00:01.141)       0:00:16.426 **********",
                                "ok: [instance] => (item={'key': 'conf', 'value': {'tempest': {'network': {'public_network_id': 'da6f04f9-28d2-4224-9cbf-d4cb1587678d'}}}})",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Deploy Helm chart] *************************",
                                "Sunday 01 March 2026  22:17:00 +0000 (0:00:00.082)       0:00:16.509 **********",
                                "changed: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Get tempest job object] ********************",
                                "Sunday 01 March 2026  22:24:58 +0000 (0:07:58.208)       0:08:14.718 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Get tempest log] ***************************",
                                "Sunday 01 March 2026  22:24:59 +0000 (0:00:01.042)       0:08:15.760 **********",
                                "ok: [instance]",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Print tempest log details] *****************",
                                "Sunday 01 March 2026  22:25:00 +0000 (0:00:01.000)       0:08:16.760 **********",
                                "ok: [instance] => {",
                                "    \"msg\": [",
                                "        \"+ tempest cleanup --init-saved-state\",",
                                "        \"+ '[' true == false ']'\",",
                                "        \"+ tempest run --config-file /etc/tempest/tempest.conf -w 4 --smoke\",",
                                "        \"/var/lib/openstack/lib/python3.12/site-packages/jsonpath_rw_ext/_string.py:19: SyntaxWarning: invalid escape sequence '\\\\('\",",
                                "        \"  SUB = re.compile(\\\"sub\\\\(/(.*)/,\\\\s+(.*)\\\\)\\\")\",",
                                "        \"/var/lib/openstack/lib/python3.12/site-packages/jsonpath_rw_ext/_string.py:20: SyntaxWarning: invalid escape sequence '\\\\('\",",
                                "        \"  SPLIT = re.compile(\\\"split\\\\((.),\\\\s+(\\\\d+),\\\\s+(\\\\d+|-1)\\\\)\\\")\",",
                                "        \"{1} tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_get_flavor [0.143926s] ... ok\",",
                                "        \"{1} tempest.api.compute.flavors.test_flavors.FlavorsV2TestJSON.test_list_flavors [0.064843s] ... ok\",",
                                "        \"{2} tempest.api.compute.security_groups.test_security_group_rules.SecurityGroupRulesTestJSON.test_security_group_rules_create [1.264053s] ... ok\",",
                                "        \"{2} tempest.api.compute.security_groups.test_security_group_rules.SecurityGroupRulesTestJSON.test_security_group_rules_list [0.956075s] ... ok\",",
                                "        \"{0} tempest.api.compute.security_groups.test_security_groups.SecurityGroupsTestJSON.test_security_groups_create_list_delete [3.489915s] ... ok\",",
                                "        \"{2} tempest.api.identity.admin.v3.test_groups.GroupsV3TestJSON.test_group_users_add_list_delete [2.785619s] ... ok\",",
                                "        \"{1} tempest.api.compute.servers.test_create_server.ServersTestJSON.test_list_servers [0.149341s] ... ok\",",
                                "        \"{1} tempest.api.compute.servers.test_create_server.ServersTestJSON.test_verify_server_details [0.000341s] ... ok\",",
                                "        \"{2} tempest.api.identity.admin.v3.test_regions.RegionsTestJSON.test_create_region_with_specific_id [0.122054s] ... ok\",",
                                "        \"{2} tempest.api.identity.admin.v3.test_services.ServicesTestJSON.test_create_update_get_service [0.296498s] ... ok\",",
                                "        \"{1} tempest.api.compute.test_versions.TestVersions.test_get_version_details [0.634856s] ... ok\",",
                                "        \"{1} tempest.api.compute.test_versions.TestVersions.test_list_api_versions [0.008604s] ... ok\",",
                                "        \"{1} tempest.api.identity.admin.v3.test_credentials.CredentialsTestJSON.test_credentials_create_get_update_delete [0.161895s] ... ok\",",
                                "        \"{0} tempest.api.compute.servers.test_create_server.ServersTestBootFromVolume.test_list_servers [0.095146s] ... ok\",",
                                "        \"{0} tempest.api.compute.servers.test_create_server.ServersTestBootFromVolume.test_verify_server_details [0.000474s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.BulkNetworkOpsTest.test_bulk_create_delete_network [4.370945s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.BulkNetworkOpsTest.test_bulk_create_delete_port [2.981629s] ... ok\",",
                                "        \"{1} tempest.api.identity.admin.v3.test_policies.PoliciesTestJSON.test_create_update_delete_policy [0.204571s] ... ok\",",
                                "        \"{3} tempest.api.compute.servers.test_attach_interfaces.AttachInterfacesUnderV243Test.test_add_remove_fixed_ip [39.107806s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.BulkNetworkOpsTest.test_bulk_create_delete_subnet [3.907191s] ... ok\",",
                                "        \"{1} tempest.api.identity.admin.v3.test_trusts.TrustsV3TestJSON.test_get_trusts_all [2.526857s] ... ok\",",
                                "        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_media_types [0.093393s] ... ok\",",
                                "        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_resources [0.031125s] ... ok\",",
                                "        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_api_version_statuses [0.032603s] ... ok\",",
                                "        \"{1} tempest.api.identity.v3.test_api_discovery.TestApiDiscovery.test_list_api_versions [0.008297s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_create_update_delete_network_subnet [3.907190s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_external_network_visibility [0.210343s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_list_networks [0.420401s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_list_subnets [0.064731s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_show_network [0.154238s] ... ok\",",
                                "        \"{2} tempest.api.network.test_networks.NetworksIpV6Test.test_show_subnet [0.087403s] ... ok\",",
                                "        \"{1} tempest.api.identity.v3.test_domains.DefaultDomainTestJSON.test_default_domain_exists [0.112793s] ... ok\",",
                                "        \"{0} tempest.api.compute.servers.test_server_addresses.ServerAddressesTestJSON.test_list_server_addresses [0.042528s] ... ok\",",
                                "        \"{0} tempest.api.compute.servers.test_server_addresses.ServerAddressesTestJSON.test_list_server_addresses_by_network [0.090606s] ... ok\",",
                                "        \"{1} tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_delete_image [0.428597s] ... ok\",",
                                "        \"{1} tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_register_upload_get_image_file [0.561660s] ... ok\",",
                                "        \"{1} tempest.api.image.v2.test_images.BasicOperationsImagesTest.test_update_image [0.170459s] ... ok\",",
                                "        \"{3} tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_list_servers [0.063175s] ... ok\",",
                                "        \"{3} tempest.api.compute.servers.test_create_server.ServersTestManualDisk.test_verify_server_details [0.000396s] ... ok\",",
                                "        \"{0} tempest.api.identity.admin.v3.test_domains.DomainsTestJSON.test_create_update_delete_domain [0.584610s] ... ok\",",
                                "        \"{1} tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_network [2.905192s] ... ok\",",
                                "        \"{1} tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_port [3.410042s] ... ok\",",
                                "        \"{1} tempest.api.network.test_networks.BulkNetworkOpsIpV6Test.test_bulk_create_delete_subnet [2.928932s] ... ok\",",
                                "        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_add_multiple_router_interfaces [15.352034s] ... ok\",",
                                "        \"{0} tempest.api.identity.admin.v3.test_roles.RolesV3TestJSON.test_role_create_update_show_list [0.209525s] ... ok\",",
                                "        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_add_remove_router_interface_with_port_id [6.320973s] ... ok\",",
                                "        \"{0} tempest.api.image.v2.test_versions.VersionsTest.test_list_versions [0.009240s] ... ok\",",
                                "        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_add_remove_router_interface_with_subnet_id [5.813371s] ... ok\",",
                                "        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_create_port_in_allowed_allocation_pools [5.204598s] ... ok\",",
                                "        \"{2} tempest.api.network.test_routers.RoutersIpV6Test.test_create_show_list_update_delete_router [4.553556s] ... ok\",",
                                "        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_create_port_with_no_securitygroups [4.396013s] ... ok\",",
                                "        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_create_update_delete_port [1.457659s] ... ok\",",
                                "        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_list_ports [0.061039s] ... ok\",",
                                "        \"{1} tempest.api.network.test_ports.PortsTestJSON.test_show_port [0.049157s] ... ok\",",
                                "        \"{2} tempest.api.network.test_subnetpools_extensions.SubnetPoolsTestJSON.test_create_list_show_update_delete_subnetpools [0.535435s] ... ok\",",
                                "        \"{1} tempest.api.network.test_versions.NetworksApiDiscovery.test_api_version_resources [0.007773s] ... ok\",",
                                "        \"{1} tempest.api.network.test_versions.NetworksApiDiscovery.test_show_api_v2_details [0.104734s] ... ok\",",
                                "        \"{0} tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_floating_ip_specifying_a_fixed_ip_address [1.623475s] ... ok\",",
                                "        \"{0} tempest.api.network.test_floating_ips.FloatingIPTestJSON.test_create_list_show_update_delete_floating_ip [2.494254s] ... ok\",",
                                "        \"{2} tempest.api.object_storage.test_account_services.AccountTest.test_list_account_metadata [0.042736s] ... ok\",",
                                "        \"{2} tempest.api.object_storage.test_account_services.AccountTest.test_list_containers [0.015456s] ... ok\",",
                                "        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_create_object [0.060405s] ... ok\",",
                                "        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_get_object [0.036170s] ... ok\",",
                                "        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_list_object_metadata [0.030012s] ... ok\",",
                                "        \"{1} tempest.api.object_storage.test_object_services.ObjectTest.test_update_object_metadata [0.060115s] ... ok\",",
                                "        \"{2} tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_large_object [0.180045s] ... ok\",",
                                "        \"{2} tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_too_many_objects [0.159305s] ... ok\",",
                                "        \"{2} tempest.api.object_storage.test_container_quotas.ContainerQuotasTest.test_upload_valid_object [0.100581s] ... ok\",",
                                "        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_port_in_allowed_allocation_pools [3.182550s] ... ok\",",
                                "        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_port_with_no_securitygroups [2.809686s] ... ok\",",
                                "        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_create_update_delete_port [1.077706s] ... ok\",",
                                "        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_list_ports [0.050407s] ... ok\",",
                                "        \"{0} tempest.api.network.test_ports.PortsIpV6TestJSON.test_show_port [0.049320s] ... ok\",",
                                "        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagFilterPortTestJSON.test_filter_port_tags [1.728471s] ... ok\",",
                                "        \"{2} tempest.scenario.test_dashboard_basic_ops.TestDashboardBasicOps.test_basic_scenario [6.482377s] ... ok\",",
                                "        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagFilterSecGroupTestJSON.test_filter_security_group_tags [0.451785s] ... ok\",",
                                "        \"{3} tempest.api.compute.servers.test_server_actions.ServerActionsTestJSON.test_reboot_server_hard [18.942391s] ... ok\",",
                                "        \"{0} tempest.api.network.test_routers.RoutersTest.test_add_multiple_router_interfaces [12.976368s] ... ok\",",
                                "        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagSubnetPoolTestJSON.test_subnetpool_tags [1.297360s] ... ok\",",
                                "        \"{0} tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_port_id [7.708847s] ... ok\",",
                                "        \"{3} tempest.api.identity.admin.v3.test_endpoints.EndPointsTestJSON.test_update_endpoint [0.587323s] ... ok\",",
                                "        \"{1} neutron_tempest_plugin.api.admin.test_tag.TagSubnetTestJSON.test_subnet_tags [0.871817s] ... ok\",",
                                "        \"{3} tempest.api.network.test_extensions.ExtensionsTestJSON.test_list_show_extensions [0.899420s] ... ok\",",
                                "        \"{1} setUpClass (neutron_tempest_plugin.vpnaas.api.test_vpnaas.VPNaaSTestJSON) ... SKIPPED: vpnaas extension not enabled.\",",
                                "        \"{3} tempest.api.network.test_networks.NetworksTest.test_create_update_delete_network_subnet [2.837927s] ... ok\",",
                                "        \"{3} tempest.api.network.test_networks.NetworksTest.test_external_network_visibility [0.205181s] ... ok\",",
                                "        \"{3} tempest.api.network.test_networks.NetworksTest.test_list_networks [0.108839s] ... ok\",",
                                "        \"{3} tempest.api.network.test_networks.NetworksTest.test_list_subnets [0.050421s] ... ok\",",
                                "        \"{0} tempest.api.network.test_routers.RoutersTest.test_add_remove_router_interface_with_subnet_id [16.624598s] ... ok\",",
                                "        \"{3} tempest.api.network.test_networks.NetworksTest.test_show_network [0.604144s] ... ok\",",
                                "        \"{3} tempest.api.network.test_networks.NetworksTest.test_show_subnet [0.044985s] ... ok\",",
                                "        \"{0} tempest.api.network.test_routers.RoutersTest.test_create_show_list_update_delete_router [3.555721s] ... ok\",",
                                "        \"{3} tempest.api.network.test_security_groups.SecGroupIPv6Test.test_create_list_update_show_delete_security_group [0.955497s] ... ok\",",
                                "        \"{3} tempest.api.network.test_security_groups.SecGroupIPv6Test.test_create_show_delete_security_group_rule [1.660493s] ... ok\",",
                                "        \"{3} tempest.api.network.test_security_groups.SecGroupIPv6Test.test_list_security_groups [0.052384s] ... ok\",",
                                "        \"{0} tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_admin_modify_quota [0.260325s] ... ok\",",
                                "        \"{0} tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_overlimit_upload [0.047805s] ... ok\",",
                                "        \"{0} tempest.api.object_storage.test_account_quotas.AccountQuotasTest.test_upload_valid_object [0.058237s] ... ok\",",
                                "        \"{3} tempest.api.network.test_security_groups.SecGroupTest.test_create_list_update_show_delete_security_group [1.595949s] ... ok\",",
                                "        \"{3} tempest.api.network.test_security_groups.SecGroupTest.test_create_show_delete_security_group_rule [1.644817s] ... ok\",",
                                "        \"{3} tempest.api.network.test_security_groups.SecGroupTest.test_list_security_groups [0.049140s] ... ok\",",
                                "        \"{0} tempest.api.object_storage.test_container_services.ContainerTest.test_create_container [0.361703s] ... ok\",",
                                "        \"{0} tempest.api.object_storage.test_container_services.ContainerTest.test_list_container_contents [0.099030s] ... ok\",",
                                "        \"{0} tempest.api.object_storage.test_container_services.ContainerTest.test_list_container_metadata [0.057649s] ... ok\",",
                                "        \"{0} tempest.api.volume.test_versions.VersionsTest.test_list_versions [0.011338s] ... ok\",",
                                "        \"{3} tempest.api.volume.test_volumes_actions.VolumesActionsTest.test_attach_detach_volume_to_instance [9.402677s] ... ok\",",
                                "        \"{3} tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete [5.399387s] ... ok\",",
                                "        \"2026-03-01 22:21:19.833 18 WARNING tempest.lib.common.ssh [-] Failed to establish authenticated ssh connection to cirros@10.96.250.218 ([Errno None] Unable to connect to port 22 on 10.96.250.218). Number attempts: 1. Retry after 2 seconds.: paramiko.ssh_exception.NoValidConnectionsError: [Errno None] Unable to connect to port 22 on 10.96.250.218\",",
                                "        \"{3} tempest.api.volume.test_volumes_get.VolumesGetTest.test_volume_create_get_update_delete_from_image [5.562776s] ... ok\",",
                                "        \"{3} tempest.api.volume.test_volumes_list.VolumesListTestJSON.test_volume_list [0.063223s] ... ok\",",
                                "        \"{0} tempest.scenario.test_network_basic_ops.TestNetworkBasicOps.test_network_basic_ops [57.493260s] ... ok\",",
                                "        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagFilterRouterTestJSON.test_filter_router_tags [0.656806s] ... ok\",",
                                "        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagNetworkTestJSON.test_network_tags [0.972950s] ... ok\",",
                                "        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagQosPolicyTestJSON.test_qos_policy_tags [0.523558s] ... ok\",",
                                "        \"{3} tempest.scenario.test_server_basic_ops.TestServerBasicOps.test_server_basic_ops [31.758761s] ... ok\",",
                                "        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagSecGroupTestJSON.test_security_group_tags [0.729708s] ... ok\",",
                                "        \"{3} setUpClass (tempest.scenario.test_server_multinode.TestServerMultinode) ... SKIPPED: Less than 2 compute nodes, skipping multinode tests.\",",
                                "        \"{0} neutron_tempest_plugin.api.admin.test_tag.TagTrunkTestJSON.test_trunk_tags [0.523296s] ... ok\",",
                                "        \"{3} neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_none [2.969443s] ... ok\",",
                                "        \"{3} neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_create_rbac_policy_with_target_tenant_too_long_id [1.811447s] ... ok\",",
                                "        \"{3} neutron_tempest_plugin.api.admin.test_shared_network_extension.RBACSharedNetworksTest.test_network_only_visible_to_policy_target [3.131806s] ... ok\",",
                                "        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagFilterFloatingIpTestJSON.test_filter_floatingip_tags [0.654962s] ... ok\",",
                                "        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetTestJSON.test_filter_subnet_tags [0.535262s] ... ok\",",
                                "        \"{2} octavia_tempest_plugin.tests.scenario.v2.test_traffic_ops.TrafficOperationsScenarioTest.test_basic_http_traffic [58.302840s] ... ok\",",
                                "        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagFilterTrunkTestJSON.test_filter_trunk_tags [0.284411s] ... ok\",",
                                "        \"{3} neutron_tempest_plugin.api.admin.test_tag.TagPortTestJSON.test_port_tags [0.801700s] ... ok\",",
                                "        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFilterNetworkTestJSON.test_filter_network_tags [1.011624s] ... ok\",",
                                "        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFilterQosPolicyTestJSON.test_filter_qos_policy_tags [0.965179s] ... ok\",",
                                "        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFilterSubnetpoolTestJSON.test_filter_subnetpool_tags [0.266950s] ... ok\",",
                                "        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagFloatingIpTestJSON.test_floatingip_tags [0.764660s] ... ok\",",
                                "        \"{2} neutron_tempest_plugin.api.admin.test_tag.TagRouterTestJSON.test_router_tags [0.993067s] ... ok\",",
                                "        \"{2} neutron_tempest_plugin.api.admin.test_tag.UpdateTagsTest.test_update_tags_affects_only_updated_resource [2.153635s] ... ok\",",
                                "        \"\",",
                                "        \"======\",",
                                "        \"Totals\",",
                                "        \"======\",",
                                "        \"Ran: 131 tests in 413.1190 sec.\",",
                                "        \" - Passed: 129\",",
                                "        \" - Skipped: 2\",",
                                "        \" - Expected Fail: 0\",",
                                "        \" - Unexpected Success: 0\",",
                                "        \" - Failed: 0\",",
                                "        \"Sum of execute time for each test: 406.1944 sec.\",",
                                "        \"\",",
                                "        \"==============\",",
                                "        \"Worker Balance\",",
                                "        \"==============\",",
                                "        \" - Worker 0 (32 tests) => 0:04:33.129895\",",
                                "        \" - Worker 1 (36 tests) => 0:02:37.971900\",",
                                "        \" - Worker 2 (32 tests) => 0:06:45.470897\",",
                                "        \" - Worker 3 (31 tests) => 0:05:39.364864\",",
                                "        \"+ tempest cleanup\",",
                                "        \"\"",
                                "    ]",
                                "}",
                                "",
                                "TASK [vexxhost.atmosphere.tempest : Fail when tempest result is failed] ********",
                                "Sunday 01 March 2026  22:25:00 +0000 (0:00:00.091)       0:08:16.851 **********",
                                "skipping: [instance]",
                                "",
                                "PLAY RECAP *********************************************************************",
                                "instance                   : ok=25   changed=2    unreachable=0    failed=0    skipped=11   rescued=0    ignored=0",
                                "localhost                  : ok=1    changed=1    unreachable=0    failed=0    skipped=0    rescued=0    ignored=0",
                                "",
                                "Sunday 01 March 2026  22:25:00 +0000 (0:00:00.063)       0:08:16.915 **********",
                                "===============================================================================",
                                "vexxhost.atmosphere.tempest : Deploy Helm chart ----------------------- 478.21s",
                                "Gathering Facts --------------------------------------------------------- 5.31s",
                                "Run \"stestr\" tests ------------------------------------------------------ 2.88s",
                                "vexxhost.atmosphere.tempest : Get test flavor object -------------------- 1.16s",
                                "vexxhost.atmosphere.tempest : Get test network object ------------------- 1.14s",
                                "vexxhost.atmosphere.tempest : Get test image object --------------------- 1.13s",
                                "vexxhost.atmosphere.openstacksdk : Install openstacksdk ----------------- 1.11s",
                                "vexxhost.atmosphere.tempest : Get tempest job object -------------------- 1.04s",
                                "vexxhost.atmosphere.tempest : Get tempest log --------------------------- 1.00s",
                                "vexxhost.atmosphere.openstacksdk : Generate cloud config file ----------- 0.65s",
                                "vexxhost.kubernetes.upload_helm_chart : Upload Helm chart --------------- 0.65s",
                                "vexxhost.atmosphere.openstacksdk : Create openstack config directory ---- 0.38s",
                                "vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints --- 0.18s",
                                "vexxhost.kubernetes.upload_helm_chart : Include help chart upload method tasks --- 0.16s",
                                "Generate OpenStack-Helm endpoints --------------------------------------- 0.16s",
                                "vexxhost.atmosphere.openstack_helm_endpoints : Retrieve list of all the needed endpoints --- 0.13s",
                                "vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts ----------- 0.10s",
                                "vexxhost.atmosphere.openstack_helm_endpoints : Generate OpenStack-Helm endpoints --- 0.10s",
                                "vexxhost.atmosphere.openstack_helm_endpoints : Clean-up facts ----------- 0.10s",
                                "vexxhost.atmosphere.tempest : Set test flavor ref ----------------------- 0.10s",
                                "INFO     [aio > verify] Executed: Successful",
                                "INFO     [aio > cleanup] Executing",
                                "WARNING  [aio > cleanup] Executed: Missing playbook (Remove from test_sequence to suppress)",
                                "INFO     [aio > destroy] Executing",
                                "WARNING  [aio > destroy] Skipping, '--destroy=never' requested.",
                                "INFO     [aio > destroy] Executed: Successful",
                                "WARNING  Molecule executed 1 scenario (1 missing files)"
                            ],
                            "zuul_log_id": "0242ac17-0011-7e84-f0f8-000000000006-1-instance"
                        }
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:01.432279Z",
                            "start": "2026-03-01T21:03:34.823005Z"
                        },
                        "id": "0242ac17-0011-7e84-f0f8-000000000006",
                        "name": "Run Molecule scenario"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 3,
            "failures": 0,
            "ignored": 0,
            "ok": 3,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "stable/2023.2",
    "index": "0",
    "phase": "post",
    "playbook": "github.com/vexxhost/atmosphere/test-playbooks/molecule/post.yml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T22:31:16.825420Z",
                    "start": "2026-03-01T22:25:02.258614Z"
                },
                "id": "0242ac17-0011-aaa1-afc9-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/system",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/system",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/system",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/system",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000000a",
                        "name": "gather-host-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-host-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:02.642751Z",
                            "start": "2026-03-01T22:25:02.272899Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000000c",
                        "name": "creating directory for system status"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -x\nsystemd-cgls --full --all --no-pager > /tmp/logs/system/systemd-cgls.txt\nip addr > /tmp/logs/system/ip-addr.txt\nip route > /tmp/logs/system/ip-route.txt\nlsblk > /tmp/logs/system/lsblk.txt\nmount > /tmp/logs/system/mount.txt\ndocker images > /tmp/logs/system/docker-images.txt\nbrctl show > /tmp/logs/system/brctl-show.txt\nps aux --sort=-%mem > /tmp/logs/system/ps.txt\ndpkg -l > /tmp/logs/system/packages.txt\nCONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))\nif [ ! -z \"$CONTAINERS\" ]; then\n  mkdir -p \"/tmp/logs/system/containers\"\n  for CONTAINER in ${CONTAINERS}; do\n    docker logs \"${CONTAINER}\" > \"/tmp/logs/system/containers/${CONTAINER}.txt\"\n  done\nfi",
                            "delta": "0:00:00.220933",
                            "end": "2026-03-01 22:25:03.222454",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -x\nsystemd-cgls --full --all --no-pager > /tmp/logs/system/systemd-cgls.txt\nip addr > /tmp/logs/system/ip-addr.txt\nip route > /tmp/logs/system/ip-route.txt\nlsblk > /tmp/logs/system/lsblk.txt\nmount > /tmp/logs/system/mount.txt\ndocker images > /tmp/logs/system/docker-images.txt\nbrctl show > /tmp/logs/system/brctl-show.txt\nps aux --sort=-%mem > /tmp/logs/system/ps.txt\ndpkg -l > /tmp/logs/system/packages.txt\nCONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))\nif [ ! -z \"$CONTAINERS\" ]; then\n  mkdir -p \"/tmp/logs/system/containers\"\n  for CONTAINER in ${CONTAINERS}; do\n    docker logs \"${CONTAINER}\" > \"/tmp/logs/system/containers/${CONTAINER}.txt\"\n  done\nfi",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000000d-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 22:25:03.001521",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "+ systemd-cgls --full --all --no-pager\n+ ip addr\n+ ip route\n+ lsblk\n+ mount\n+ docker images\n+ brctl show\n/bin/bash: line 8: brctl: command not found\n+ ps aux --sort=-%mem\n+ dpkg -l\n+ CONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))\n++ docker ps -a --format '{{ .Names }}' --filter label=zuul\n+ '[' '!' -z '' ']'",
                            "stdout_lines": [
                                "+ systemd-cgls --full --all --no-pager",
                                "+ ip addr",
                                "+ ip route",
                                "+ lsblk",
                                "+ mount",
                                "+ docker images",
                                "+ brctl show",
                                "/bin/bash: line 8: brctl: command not found",
                                "+ ps aux --sort=-%mem",
                                "+ dpkg -l",
                                "+ CONTAINERS=($(docker ps -a --format '{{ .Names }}' --filter label=zuul))",
                                "++ docker ps -a --format '{{ .Names }}' --filter label=zuul",
                                "+ '[' '!' -z '' ']'"
                            ],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000000d-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000000a",
                        "name": "gather-host-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-host-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:03.685616Z",
                            "start": "2026-03-01T22:25:02.666510Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000000d",
                        "name": "Get logs for each host"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/tmp/logs/system /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.156:/tmp/logs/system",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "created directory /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance\ncd+++++++++ system/\n>f+++++++++ system/brctl-show.txt\n>f+++++++++ system/docker-images.txt\n>f+++++++++ system/ip-addr.txt\n>f+++++++++ system/ip-route.txt\n>f+++++++++ system/lsblk.txt\n>f+++++++++ system/mount.txt\n>f+++++++++ system/packages.txt\n>f+++++++++ system/ps.txt\n>f+++++++++ system/systemd-cgls.txt\n",
                            "rc": 0,
                            "stdout_lines": [
                                "created directory /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                                "cd+++++++++ system/",
                                ">f+++++++++ system/brctl-show.txt",
                                ">f+++++++++ system/docker-images.txt",
                                ">f+++++++++ system/ip-addr.txt",
                                ">f+++++++++ system/ip-route.txt",
                                ">f+++++++++ system/lsblk.txt",
                                ">f+++++++++ system/mount.txt",
                                ">f+++++++++ system/packages.txt",
                                ">f+++++++++ system/ps.txt",
                                ">f+++++++++ system/systemd-cgls.txt"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000000a",
                        "name": "gather-host-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-host-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:04.336631Z",
                            "start": "2026-03-01T22:25:03.693194Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000000e",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "directory",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/tmp/logs/helm/values",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/tmp/logs/helm/values",
                                            "state": "absent"
                                        }
                                    },
                                    "directory": "values",
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/tmp/logs/helm/values",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/tmp/logs/helm/values",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0
                                },
                                {
                                    "ansible_loop_var": "directory",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/tmp/logs/helm/releases",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/tmp/logs/helm/releases",
                                            "state": "absent"
                                        }
                                    },
                                    "directory": "releases",
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": null,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/tmp/logs/helm/releases",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/tmp/logs/helm/releases",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000010",
                        "name": "helm-release-status",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/helm-release-status"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:04.759892Z",
                            "start": "2026-03-01T22:25:04.346863Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000012",
                        "name": "creating directory for helm release status"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\n\nfor namespace in $(kubectl get namespaces --no-headers --output custom-columns=\":metadata.name\"); do\n      # get all Helm releases including pending and failed releases\n      for release in $(helm list --all --short --namespace $namespace); do\n              # Make respective directories only when a Helm release actually exists in the namespace\n              # to prevent uploading a bunch of empty directories for namespaces without a Helm release.\n              mkdir -p /tmp/logs/helm/releases/$namespace\n              mkdir -p /tmp/logs/helm/values/$namespace\n\n              helm status $release --namespace $namespace >> /tmp/logs/helm/releases/$namespace/$release.txt\n              helm get values $release --namespace $namespace --all >> /tmp/logs/helm/values/$namespace/$release.yaml\n      done\ndone",
                            "delta": "0:00:08.730375",
                            "end": "2026-03-01 22:25:13.714862",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\n\nfor namespace in $(kubectl get namespaces --no-headers --output custom-columns=\":metadata.name\"); do\n      # get all Helm releases including pending and failed releases\n      for release in $(helm list --all --short --namespace $namespace); do\n              # Make respective directories only when a Helm release actually exists in the namespace\n              # to prevent uploading a bunch of empty directories for namespaces without a Helm release.\n              mkdir -p /tmp/logs/helm/releases/$namespace\n              mkdir -p /tmp/logs/helm/values/$namespace\n\n              helm status $release --namespace $namespace >> /tmp/logs/helm/releases/$namespace/$release.txt\n              helm get values $release --namespace $namespace --all >> /tmp/logs/helm/values/$namespace/$release.yaml\n      done\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000014-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 22:25:04.984487",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "",
                            "stdout_lines": [],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000014-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000010",
                        "name": "helm-release-status",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/helm-release-status"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:13.813806Z",
                            "start": "2026-03-01T22:25:04.787897Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000014",
                        "name": "Gather get release status for helm charts"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/tmp/logs/helm /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.156:/tmp/logs/helm",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ helm/\ncd+++++++++ helm/releases/\ncd+++++++++ helm/releases/auth-system/\n>f+++++++++ helm/releases/auth-system/keycloak.txt\ncd+++++++++ helm/releases/cert-manager/\n>f+++++++++ helm/releases/cert-manager/cert-manager.txt\ncd+++++++++ helm/releases/ingress-nginx/\n>f+++++++++ helm/releases/ingress-nginx/ingress-nginx.txt\ncd+++++++++ helm/releases/kube-system/\n>f+++++++++ helm/releases/kube-system/cilium.txt\ncd+++++++++ helm/releases/local-path-storage/\n>f+++++++++ helm/releases/local-path-storage/local-path-provisioner.txt\ncd+++++++++ helm/releases/monitoring/\n>f+++++++++ helm/releases/monitoring/goldpinger.txt\n>f+++++++++ helm/releases/monitoring/kube-prometheus-stack.txt\n>f+++++++++ helm/releases/monitoring/loki.txt\n>f+++++++++ helm/releases/monitoring/node-feature-discovery.txt\n>f+++++++++ helm/releases/monitoring/prometheus-pushgateway.txt\n>f+++++++++ helm/releases/monitoring/vector.txt\ncd+++++++++ helm/releases/openstack/\n>f+++++++++ helm/releases/openstack/barbican.txt\n>f+++++++++ helm/releases/openstack/ceph-provisioners.txt\n>f+++++++++ helm/releases/openstack/ceph.txt\n>f+++++++++ helm/releases/openstack/cinder.txt\n>f+++++++++ helm/releases/openstack/glance.txt\n>f+++++++++ helm/releases/openstack/heat.txt\n>f+++++++++ helm/releases/openstack/horizon.txt\n>f+++++++++ helm/releases/openstack/keystone.txt\n>f+++++++++ helm/releases/openstack/libvirt.txt\n>f+++++++++ helm/releases/openstack/magnum.txt\n>f+++++++++ helm/releases/openstack/manila.txt\n>f+++++++++ helm/releases/openstack/memcached.txt\n>f+++++++++ helm/releases/openstack/neutron.txt\n>f+++++++++ helm/releases/openstack/nova.txt\n>f+++++++++ helm/releases/openstack/octavia.txt\n>f+++++++++ helm/releases/openstack/openvswitch.txt\n>f+++++++++ helm/releases/openstack/ovn.txt\n>f+++++++++ helm/releases/openstack/placement.txt\n>f+++++++++ helm/releases/openstack/pxc-operator.txt\n>f+++++++++ 
helm/releases/openstack/rabbitmq-cluster-operator.txt\n>f+++++++++ helm/releases/openstack/staffeln.txt\n>f+++++++++ helm/releases/openstack/tempest.txt\n>f+++++++++ helm/releases/openstack/valkey.txt\ncd+++++++++ helm/releases/rook-ceph/\n>f+++++++++ helm/releases/rook-ceph/rook-ceph.txt\ncd+++++++++ helm/values/\ncd+++++++++ helm/values/auth-system/\n>f+++++++++ helm/values/auth-system/keycloak.yaml\ncd+++++++++ helm/values/cert-manager/\n>f+++++++++ helm/values/cert-manager/cert-manager.yaml\ncd+++++++++ helm/values/ingress-nginx/\n>f+++++++++ helm/values/ingress-nginx/ingress-nginx.yaml\ncd+++++++++ helm/values/kube-system/\n>f+++++++++ helm/values/kube-system/cilium.yaml\ncd+++++++++ helm/values/local-path-storage/\n>f+++++++++ helm/values/local-path-storage/local-path-provisioner.yaml\ncd+++++++++ helm/values/monitoring/\n>f+++++++++ helm/values/monitoring/goldpinger.yaml\n>f+++++++++ helm/values/monitoring/kube-prometheus-stack.yaml\n>f+++++++++ helm/values/monitoring/loki.yaml\n>f+++++++++ helm/values/monitoring/node-feature-discovery.yaml\n>f+++++++++ helm/values/monitoring/prometheus-pushgateway.yaml\n>f+++++++++ helm/values/monitoring/vector.yaml\ncd+++++++++ helm/values/openstack/\n>f+++++++++ helm/values/openstack/barbican.yaml\n>f+++++++++ helm/values/openstack/ceph-provisioners.yaml\n>f+++++++++ helm/values/openstack/ceph.yaml\n>f+++++++++ helm/values/openstack/cinder.yaml\n>f+++++++++ helm/values/openstack/glance.yaml\n>f+++++++++ helm/values/openstack/heat.yaml\n>f+++++++++ helm/values/openstack/horizon.yaml\n>f+++++++++ helm/values/openstack/keystone.yaml\n>f+++++++++ helm/values/openstack/libvirt.yaml\n>f+++++++++ helm/values/openstack/magnum.yaml\n>f+++++++++ helm/values/openstack/manila.yaml\n>f+++++++++ helm/values/openstack/memcached.yaml\n>f+++++++++ helm/values/openstack/neutron.yaml\n>f+++++++++ helm/values/openstack/nova.yaml\n>f+++++++++ helm/values/openstack/octavia.yaml\n>f+++++++++ helm/values/openstack/openvswitch.yaml\n>f+++++++++ 
helm/values/openstack/ovn.yaml\n>f+++++++++ helm/values/openstack/placement.yaml\n>f+++++++++ helm/values/openstack/pxc-operator.yaml\n>f+++++++++ helm/values/openstack/rabbitmq-cluster-operator.yaml\n>f+++++++++ helm/values/openstack/staffeln.yaml\n>f+++++++++ helm/values/openstack/tempest.yaml\n>f+++++++++ helm/values/openstack/valkey.yaml\ncd+++++++++ helm/values/rook-ceph/\n>f+++++++++ helm/values/rook-ceph/rook-ceph.yaml\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ helm/",
                                "cd+++++++++ helm/releases/",
                                "cd+++++++++ helm/releases/auth-system/",
                                ">f+++++++++ helm/releases/auth-system/keycloak.txt",
                                "cd+++++++++ helm/releases/cert-manager/",
                                ">f+++++++++ helm/releases/cert-manager/cert-manager.txt",
                                "cd+++++++++ helm/releases/ingress-nginx/",
                                ">f+++++++++ helm/releases/ingress-nginx/ingress-nginx.txt",
                                "cd+++++++++ helm/releases/kube-system/",
                                ">f+++++++++ helm/releases/kube-system/cilium.txt",
                                "cd+++++++++ helm/releases/local-path-storage/",
                                ">f+++++++++ helm/releases/local-path-storage/local-path-provisioner.txt",
                                "cd+++++++++ helm/releases/monitoring/",
                                ">f+++++++++ helm/releases/monitoring/goldpinger.txt",
                                ">f+++++++++ helm/releases/monitoring/kube-prometheus-stack.txt",
                                ">f+++++++++ helm/releases/monitoring/loki.txt",
                                ">f+++++++++ helm/releases/monitoring/node-feature-discovery.txt",
                                ">f+++++++++ helm/releases/monitoring/prometheus-pushgateway.txt",
                                ">f+++++++++ helm/releases/monitoring/vector.txt",
                                "cd+++++++++ helm/releases/openstack/",
                                ">f+++++++++ helm/releases/openstack/barbican.txt",
                                ">f+++++++++ helm/releases/openstack/ceph-provisioners.txt",
                                ">f+++++++++ helm/releases/openstack/ceph.txt",
                                ">f+++++++++ helm/releases/openstack/cinder.txt",
                                ">f+++++++++ helm/releases/openstack/glance.txt",
                                ">f+++++++++ helm/releases/openstack/heat.txt",
                                ">f+++++++++ helm/releases/openstack/horizon.txt",
                                ">f+++++++++ helm/releases/openstack/keystone.txt",
                                ">f+++++++++ helm/releases/openstack/libvirt.txt",
                                ">f+++++++++ helm/releases/openstack/magnum.txt",
                                ">f+++++++++ helm/releases/openstack/manila.txt",
                                ">f+++++++++ helm/releases/openstack/memcached.txt",
                                ">f+++++++++ helm/releases/openstack/neutron.txt",
                                ">f+++++++++ helm/releases/openstack/nova.txt",
                                ">f+++++++++ helm/releases/openstack/octavia.txt",
                                ">f+++++++++ helm/releases/openstack/openvswitch.txt",
                                ">f+++++++++ helm/releases/openstack/ovn.txt",
                                ">f+++++++++ helm/releases/openstack/placement.txt",
                                ">f+++++++++ helm/releases/openstack/pxc-operator.txt",
                                ">f+++++++++ helm/releases/openstack/rabbitmq-cluster-operator.txt",
                                ">f+++++++++ helm/releases/openstack/staffeln.txt",
                                ">f+++++++++ helm/releases/openstack/tempest.txt",
                                ">f+++++++++ helm/releases/openstack/valkey.txt",
                                "cd+++++++++ helm/releases/rook-ceph/",
                                ">f+++++++++ helm/releases/rook-ceph/rook-ceph.txt",
                                "cd+++++++++ helm/values/",
                                "cd+++++++++ helm/values/auth-system/",
                                ">f+++++++++ helm/values/auth-system/keycloak.yaml",
                                "cd+++++++++ helm/values/cert-manager/",
                                ">f+++++++++ helm/values/cert-manager/cert-manager.yaml",
                                "cd+++++++++ helm/values/ingress-nginx/",
                                ">f+++++++++ helm/values/ingress-nginx/ingress-nginx.yaml",
                                "cd+++++++++ helm/values/kube-system/",
                                ">f+++++++++ helm/values/kube-system/cilium.yaml",
                                "cd+++++++++ helm/values/local-path-storage/",
                                ">f+++++++++ helm/values/local-path-storage/local-path-provisioner.yaml",
                                "cd+++++++++ helm/values/monitoring/",
                                ">f+++++++++ helm/values/monitoring/goldpinger.yaml",
                                ">f+++++++++ helm/values/monitoring/kube-prometheus-stack.yaml",
                                ">f+++++++++ helm/values/monitoring/loki.yaml",
                                ">f+++++++++ helm/values/monitoring/node-feature-discovery.yaml",
                                ">f+++++++++ helm/values/monitoring/prometheus-pushgateway.yaml",
                                ">f+++++++++ helm/values/monitoring/vector.yaml",
                                "cd+++++++++ helm/values/openstack/",
                                ">f+++++++++ helm/values/openstack/barbican.yaml",
                                ">f+++++++++ helm/values/openstack/ceph-provisioners.yaml",
                                ">f+++++++++ helm/values/openstack/ceph.yaml",
                                ">f+++++++++ helm/values/openstack/cinder.yaml",
                                ">f+++++++++ helm/values/openstack/glance.yaml",
                                ">f+++++++++ helm/values/openstack/heat.yaml",
                                ">f+++++++++ helm/values/openstack/horizon.yaml",
                                ">f+++++++++ helm/values/openstack/keystone.yaml",
                                ">f+++++++++ helm/values/openstack/libvirt.yaml",
                                ">f+++++++++ helm/values/openstack/magnum.yaml",
                                ">f+++++++++ helm/values/openstack/manila.yaml",
                                ">f+++++++++ helm/values/openstack/memcached.yaml",
                                ">f+++++++++ helm/values/openstack/neutron.yaml",
                                ">f+++++++++ helm/values/openstack/nova.yaml",
                                ">f+++++++++ helm/values/openstack/octavia.yaml",
                                ">f+++++++++ helm/values/openstack/openvswitch.yaml",
                                ">f+++++++++ helm/values/openstack/ovn.yaml",
                                ">f+++++++++ helm/values/openstack/placement.yaml",
                                ">f+++++++++ helm/values/openstack/pxc-operator.yaml",
                                ">f+++++++++ helm/values/openstack/rabbitmq-cluster-operator.yaml",
                                ">f+++++++++ helm/values/openstack/staffeln.yaml",
                                ">f+++++++++ helm/values/openstack/tempest.yaml",
                                ">f+++++++++ helm/values/openstack/valkey.yaml",
                                "cd+++++++++ helm/values/rook-ceph/",
                                ">f+++++++++ helm/values/rook-ceph/rook-ceph.yaml"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000010",
                        "name": "helm-release-status",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/helm-release-status"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:14.336646Z",
                            "start": "2026-03-01T22:25:13.820305Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000015",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/objects/cluster",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/objects/cluster",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/objects/cluster",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/objects/cluster",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:14.586649Z",
                            "start": "2026-03-01T22:25:14.345724Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000019",
                        "name": "creating directory for cluster scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nexport OBJECT_TYPE=node,clusterrole,clusterrolebinding,storageclass,namespace\nexport PARALLELISM_FACTOR=2\n\nfunction list_objects () {\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"$@\"' _ {}\n}\nexport -f list_objects\n\nfunction name_objects () {\n  export OBJECT=$1\n  kubectl get ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${OBJECT} ${1#*/}\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export OBJECT=${input[0]}\n  export NAME=${input[1]#*/}\n  echo \"${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/cluster/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nlist_objects |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                            "delta": "0:00:23.944742",
                            "end": "2026-03-01 22:25:38.789164",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nexport OBJECT_TYPE=node,clusterrole,clusterrolebinding,storageclass,namespace\nexport PARALLELISM_FACTOR=2\n\nfunction list_objects () {\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"$@\"' _ {}\n}\nexport -f list_objects\n\nfunction name_objects () {\n  export OBJECT=$1\n  kubectl get ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${OBJECT} ${1#*/}\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export OBJECT=${input[0]}\n  export NAME=${input[1]#*/}\n  echo \"${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/cluster/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nlist_objects |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000001a-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 22:25:14.844422",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nnode/instance\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nclusterrole/admin\nclusterrole/capi-aggregated-manager-role\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nclusterrole/capi-kubeadm-bootstrap-manager-role\nclusterrole/capi-kubeadm-control-plane-aggregated-manager-role\nclusterrole/capi-kubeadm-control-plane-manager-role\nclusterrole/capi-manager-role\nclusterrole/capo-manager-role\nclusterrole/cephfs-csi-nodeplugin\nclusterrole/cephfs-external-provisioner-runner\nclusterrole/cert-manager-cainjector\nclusterrole/cert-manager-controller-approve:cert-manager-io\nclusterrole/cert-manager-controller-certificates\nclusterrole/cert-manager-controller-certificatesigningrequests\nclusterrole/cert-manager-controller-challenges\nclusterrole/cert-manager-controller-clusterissuers\nclusterrole/cert-manager-controller-ingress-shim\nclusterrole/cert-manager-controller-issuers\nclusterrole/cert-manager-controller-orders\nclusterrole/cert-manager-edit\nclusterrole/cert-manager-view\nclusterrole/cert-manager-webhook:subjectaccessreviews\nclusterrole/cilium\nclusterrole/cilium-operator\nclusterrole/cluster-admin\nclusterrole/edit\nclusterrole/goldpinger-clusterrole\nclusterrole/ingress-nginx\nclusterrole/kube-prometheus-stack-
grafana-clusterrole\nclusterrolebinding/capi-kubeadm-bootstrap-manager-rolebinding\nclusterrole/kube-prometheus-stack-kube-state-metrics\nclusterrolebinding/capi-kubeadm-control-plane-manager-rolebinding\nclusterrole/kube-prometheus-stack-operator\nclusterrolebinding/capi-manager-rolebinding\nclusterrole/kube-prometheus-stack-prometheus\nclusterrolebinding/capo-manager-rolebinding\nclusterrolebinding/cephfs-csi-nodeplugin-role\nclusterrole/kubeadm:get-nodes\nclusterrolebinding/cephfs-csi-provisioner-role\nclusterrole/local-path-provisioner\nclusterrolebinding/cert-manager-cainjector\nclusterrole/node-feature-discovery\nclusterrolebinding/cert-manager-controller-approve:cert-manager-io\nclusterrolebinding/cert-manager-controller-certificates\nclusterrole/node-feature-discovery-gc\nclusterrolebinding/cert-manager-controller-certificatesigningrequests\nclusterrole/nova-bootstrap\nclusterrolebinding/cert-manager-controller-challenges\nclusterrole/nova-cell-setup\nclusterrolebinding/cert-manager-controller-clusterissuers\nclusterrole/objectstorage-provisioner-role\nclusterrolebinding/cert-manager-controller-ingress-shim\nclusterrole/orc-image-editor-role\nclusterrolebinding/cert-manager-controller-issuers\nclusterrole/orc-image-viewer-role\nclusterrolebinding/cert-manager-controller-orders\nclusterrolebinding/cert-manager-webhook:subjectaccessreviews\nclusterrole/orc-manager-role\nclusterrolebinding/cilium\nclusterrole/orc-metrics-auth-role\nclusterrolebinding/cilium-operator\nclusterrole/orc-metrics-reader\nclusterrolebinding/cluster-admin\nclusterrole/ovn-controller\nclusterrolebinding/goldpinger-clusterrolebinding\nclusterrolebinding/ingress-nginx\nclusterrole/rabbitmq-cluster-operator-openstack\nclusterrolebinding/kube-prometheus-stack-grafana-clusterrolebinding\nclusterrole/rabbitmq-cluster-operator-openstack-admin\nclusterrolebinding/kube-prometheus-stack-kube-state-metrics\nclusterrole/rabbitmq-cluster-operator-openstack-edit\nclusterrole/rabbitmq-cluster-operator
-openstack-view\nclusterrolebinding/kube-prometheus-stack-operator\nclusterrole/rabbitmq-messaging-topology-operator-openstack\nclusterrolebinding/kube-prometheus-stack-prometheus\nclusterrole/rabbitmq-messaging-topology-operator-openstack-admin\nclusterrolebinding/kubeadm:get-nodes\nclusterrole/rabbitmq-messaging-topology-operator-openstack-edit\nclusterrolebinding/kubeadm:kubelet-bootstrap\nclusterrole/rabbitmq-messaging-topology-operator-openstack-view\nclusterrolebinding/kubeadm:node-autoapprove-bootstrap\nclusterrole/rbd-csi-nodeplugin\nclusterrolebinding/kubeadm:node-autoapprove-certificate-rotation\nclusterrole/rbd-external-provisioner-runner\nclusterrolebinding/kubeadm:node-proxier\nclusterrole/rook-ceph-cluster-mgmt\nclusterrolebinding/local-path-provisioner\nclusterrole/rook-ceph-global\nclusterrolebinding/magnum-cluster-api\nclusterrole/rook-ceph-mgr-cluster\nclusterrolebinding/node-feature-discovery\nclusterrole/rook-ceph-mgr-system\nclusterrolebinding/node-feature-discovery-gc\nclusterrole/rook-ceph-object-bucket\nclusterrolebinding/nova-bootstrap\nclusterrole/rook-ceph-osd\nclusterrolebinding/nova-cell-setup\nclusterrole/rook-ceph-system\nclusterrolebinding/objectstorage-provisioner-role-binding\nclusterrole/secretgen-controller-cluster-role\nclusterrolebinding/orc-manager-rolebinding\nclusterrole/system:aggregate-to-admin\nclusterrolebinding/orc-metrics-auth-rolebinding\nclusterrole/system:aggregate-to-edit\nclusterrolebinding/ovn-controller\nclusterrole/system:aggregate-to-view\nclusterrolebinding/rabbitmq-cluster-operator-openstack\nclusterrole/system:auth-delegator\nclusterrolebinding/rabbitmq-messaging-topology-operator-openstack\nclusterrole/system:basic-user\nclusterrolebinding/rbd-csi-nodeplugin\nclusterrolebinding/rbd-csi-provisioner-role\nclusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient\nclusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient\nclusterrolebinding/rook-ceph-global\nclusterrol
e/system:certificates.k8s.io:kube-apiserver-client-approver\nclusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver\nclusterrolebinding/rook-ceph-mgr-cluster\nclusterrole/system:certificates.k8s.io:kubelet-serving-approver\nclusterrolebinding/rook-ceph-mgr-cluster-openstack\nclusterrole/system:certificates.k8s.io:legacy-unknown-approver\nclusterrolebinding/rook-ceph-object-bucket\nclusterrolebinding/rook-ceph-osd\nclusterrole/system:controller:attachdetach-controller\nclusterrole/system:controller:certificate-controller\nclusterrolebinding/rook-ceph-osd-openstack\nclusterrole/system:controller:clusterrole-aggregation-controller\nclusterrolebinding/rook-ceph-system\nclusterrole/system:controller:cronjob-controller\nclusterrolebinding/secretgen-controller-cluster-role-binding\nclusterrole/system:controller:daemon-set-controller\nclusterrole/system:controller:deployment-controller\nclusterrolebinding/system:basic-user\nclusterrole/system:controller:disruption-controller\nclusterrolebinding/system:controller:attachdetach-controller\nclusterrolebinding/system:controller:certificate-controller\nclusterrolebinding/system:controller:clusterrole-aggregation-controller\nclusterrole/system:controller:endpoint-controller\nclusterrolebinding/system:controller:cronjob-controller\nclusterrole/system:controller:endpointslice-controller\nclusterrolebinding/system:controller:daemon-set-controller\nclusterrole/system:controller:endpointslicemirroring-controller\nclusterrolebinding/system:controller:deployment-controller\nclusterrole/system:controller:ephemeral-volume-controller\nclusterrole/system:controller:expand-controller\nclusterrolebinding/system:controller:disruption-controller\nclusterrole/system:controller:generic-garbage-collector\nclusterrolebinding/system:controller:endpoint-controller\nclusterrole/system:controller:horizontal-pod-autoscaler\nclusterrolebinding/system:controller:endpointslice-controller\nclusterrole/system:controller:job-controller\n
clusterrole/system:controller:namespace-controller\nclusterrolebinding/system:controller:endpointslicemirroring-controller\nclusterrolebinding/system:controller:ephemeral-volume-controller\nclusterrole/system:controller:node-controller\nclusterrolebinding/system:controller:expand-controller\nclusterrole/system:controller:persistent-volume-binder\nclusterrolebinding/system:controller:generic-garbage-collector\nclusterrole/system:controller:pod-garbage-collector\nclusterrolebinding/system:controller:horizontal-pod-autoscaler\nclusterrole/system:controller:pv-protection-controller\nclusterrolebinding/system:controller:job-controller\nclusterrole/system:controller:pvc-protection-controller\nclusterrolebinding/system:controller:namespace-controller\nclusterrole/system:controller:replicaset-controller\nclusterrolebinding/system:controller:node-controller\nclusterrole/system:controller:replication-controller\nclusterrolebinding/system:controller:persistent-volume-binder\nclusterrolebinding/system:controller:pod-garbage-collector\nclusterrole/system:controller:resourcequota-controller\nclusterrolebinding/system:controller:pv-protection-controller\nclusterrole/system:controller:root-ca-cert-publisher\nclusterrolebinding/system:controller:pvc-protection-controller\nclusterrole/system:controller:route-controller\nclusterrolebinding/system:controller:replicaset-controller\nclusterrole/system:controller:service-account-controller\nclusterrolebinding/system:controller:replication-controller\nclusterrole/system:controller:service-controller\nclusterrolebinding/system:controller:resourcequota-controller\nclusterrole/system:controller:statefulset-controller\nclusterrole/system:controller:ttl-after-finished-controller\nclusterrolebinding/system:controller:root-ca-cert-publisher\nclusterrole/system:controller:ttl-controller\nclusterrolebinding/system:controller:route-controller\nclusterrole/system:coredns\nclusterrolebinding/system:controller:service-account-controller\nclusterrole/sy
stem:discovery\nclusterrolebinding/system:controller:service-controller\nclusterrole/system:heapster\nclusterrolebinding/system:controller:statefulset-controller\nclusterrolebinding/system:controller:ttl-after-finished-controller\nclusterrole/system:kube-aggregator\nclusterrolebinding/system:controller:ttl-controller\nclusterrole/system:kube-controller-manager\nclusterrolebinding/system:coredns\nclusterrolebinding/system:discovery\nclusterrolebinding/system:kube-controller-manager\nclusterrolebinding/system:kube-dns\nclusterrole/system:kube-dns\nclusterrolebinding/system:kube-scheduler\nclusterrole/system:kube-scheduler\nclusterrolebinding/system:monitoring\nclusterrole/system:kubelet-api-admin\nclusterrole/system:monitoring\nclusterrolebinding/system:node\nclusterrole/system:node\nclusterrolebinding/system:node-proxier\nclusterrole/system:node-bootstrapper\nclusterrolebinding/system:public-info-viewer\nclusterrole/system:node-problem-detector\nclusterrolebinding/system:service-account-issuer-discovery\nclusterrole/system:node-proxier\nclusterrolebinding/system:volume-scheduler\nclusterrole/system:persistent-volume-provisioner\nclusterrolebinding/vector\nclusterrole/system:public-info-viewer\nclusterrole/system:service-account-issuer-discovery\nclusterrole/system:volume-scheduler\nclusterrole/vector\nclusterrole/view\nstorageclass/general\nnamespace/auth-system\nnamespace/capi-kubeadm-bootstrap-system\nnamespace/capi-kubeadm-control-plane-system\nnamespace/capi-system\nnamespace/capo-system\nnamespace/cert-manager\nnamespace/default\nnamespace/ingress-nginx\nnamespace/kube-node-lease\nnamespace/kube-public\nnamespace/kube-system\nnamespace/local-path-storage\nnamespace/magnum-system\nnamespace/monitoring\nnamespace/openstack\nnamespace/orc-system\nnamespace/rook-ceph\nnamespace/secretgen-controller",
                            "stdout_lines": [
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "node/instance",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "clusterrole/admin",
                                "clusterrole/capi-aggregated-manager-role",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "clusterrole/capi-kubeadm-bootstrap-manager-role",
                                "clusterrole/capi-kubeadm-control-plane-aggregated-manager-role",
                                "clusterrole/capi-kubeadm-control-plane-manager-role",
                                "clusterrole/capi-manager-role",
                                "clusterrole/capo-manager-role",
                                "clusterrole/cephfs-csi-nodeplugin",
                                "clusterrole/cephfs-external-provisioner-runner",
                                "clusterrole/cert-manager-cainjector",
                                "clusterrole/cert-manager-controller-approve:cert-manager-io",
                                "clusterrole/cert-manager-controller-certificates",
                                "clusterrole/cert-manager-controller-certificatesigningrequests",
                                "clusterrole/cert-manager-controller-challenges",
                                "clusterrole/cert-manager-controller-clusterissuers",
                                "clusterrole/cert-manager-controller-ingress-shim",
                                "clusterrole/cert-manager-controller-issuers",
                                "clusterrole/cert-manager-controller-orders",
                                "clusterrole/cert-manager-edit",
                                "clusterrole/cert-manager-view",
                                "clusterrole/cert-manager-webhook:subjectaccessreviews",
                                "clusterrole/cilium",
                                "clusterrole/cilium-operator",
                                "clusterrole/cluster-admin",
                                "clusterrole/edit",
                                "clusterrole/goldpinger-clusterrole",
                                "clusterrole/ingress-nginx",
                                "clusterrole/kube-prometheus-stack-grafana-clusterrole",
                                "clusterrolebinding/capi-kubeadm-bootstrap-manager-rolebinding",
                                "clusterrole/kube-prometheus-stack-kube-state-metrics",
                                "clusterrolebinding/capi-kubeadm-control-plane-manager-rolebinding",
                                "clusterrole/kube-prometheus-stack-operator",
                                "clusterrolebinding/capi-manager-rolebinding",
                                "clusterrole/kube-prometheus-stack-prometheus",
                                "clusterrolebinding/capo-manager-rolebinding",
                                "clusterrolebinding/cephfs-csi-nodeplugin-role",
                                "clusterrole/kubeadm:get-nodes",
                                "clusterrolebinding/cephfs-csi-provisioner-role",
                                "clusterrole/local-path-provisioner",
                                "clusterrolebinding/cert-manager-cainjector",
                                "clusterrole/node-feature-discovery",
                                "clusterrolebinding/cert-manager-controller-approve:cert-manager-io",
                                "clusterrolebinding/cert-manager-controller-certificates",
                                "clusterrole/node-feature-discovery-gc",
                                "clusterrolebinding/cert-manager-controller-certificatesigningrequests",
                                "clusterrole/nova-bootstrap",
                                "clusterrolebinding/cert-manager-controller-challenges",
                                "clusterrole/nova-cell-setup",
                                "clusterrolebinding/cert-manager-controller-clusterissuers",
                                "clusterrole/objectstorage-provisioner-role",
                                "clusterrolebinding/cert-manager-controller-ingress-shim",
                                "clusterrole/orc-image-editor-role",
                                "clusterrolebinding/cert-manager-controller-issuers",
                                "clusterrole/orc-image-viewer-role",
                                "clusterrolebinding/cert-manager-controller-orders",
                                "clusterrolebinding/cert-manager-webhook:subjectaccessreviews",
                                "clusterrole/orc-manager-role",
                                "clusterrolebinding/cilium",
                                "clusterrole/orc-metrics-auth-role",
                                "clusterrolebinding/cilium-operator",
                                "clusterrole/orc-metrics-reader",
                                "clusterrolebinding/cluster-admin",
                                "clusterrole/ovn-controller",
                                "clusterrolebinding/goldpinger-clusterrolebinding",
                                "clusterrolebinding/ingress-nginx",
                                "clusterrole/rabbitmq-cluster-operator-openstack",
                                "clusterrolebinding/kube-prometheus-stack-grafana-clusterrolebinding",
                                "clusterrole/rabbitmq-cluster-operator-openstack-admin",
                                "clusterrolebinding/kube-prometheus-stack-kube-state-metrics",
                                "clusterrole/rabbitmq-cluster-operator-openstack-edit",
                                "clusterrole/rabbitmq-cluster-operator-openstack-view",
                                "clusterrolebinding/kube-prometheus-stack-operator",
                                "clusterrole/rabbitmq-messaging-topology-operator-openstack",
                                "clusterrolebinding/kube-prometheus-stack-prometheus",
                                "clusterrole/rabbitmq-messaging-topology-operator-openstack-admin",
                                "clusterrolebinding/kubeadm:get-nodes",
                                "clusterrole/rabbitmq-messaging-topology-operator-openstack-edit",
                                "clusterrolebinding/kubeadm:kubelet-bootstrap",
                                "clusterrole/rabbitmq-messaging-topology-operator-openstack-view",
                                "clusterrolebinding/kubeadm:node-autoapprove-bootstrap",
                                "clusterrole/rbd-csi-nodeplugin",
                                "clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation",
                                "clusterrole/rbd-external-provisioner-runner",
                                "clusterrolebinding/kubeadm:node-proxier",
                                "clusterrole/rook-ceph-cluster-mgmt",
                                "clusterrolebinding/local-path-provisioner",
                                "clusterrole/rook-ceph-global",
                                "clusterrolebinding/magnum-cluster-api",
                                "clusterrole/rook-ceph-mgr-cluster",
                                "clusterrolebinding/node-feature-discovery",
                                "clusterrole/rook-ceph-mgr-system",
                                "clusterrolebinding/node-feature-discovery-gc",
                                "clusterrole/rook-ceph-object-bucket",
                                "clusterrolebinding/nova-bootstrap",
                                "clusterrole/rook-ceph-osd",
                                "clusterrolebinding/nova-cell-setup",
                                "clusterrole/rook-ceph-system",
                                "clusterrolebinding/objectstorage-provisioner-role-binding",
                                "clusterrole/secretgen-controller-cluster-role",
                                "clusterrolebinding/orc-manager-rolebinding",
                                "clusterrole/system:aggregate-to-admin",
                                "clusterrolebinding/orc-metrics-auth-rolebinding",
                                "clusterrole/system:aggregate-to-edit",
                                "clusterrolebinding/ovn-controller",
                                "clusterrole/system:aggregate-to-view",
                                "clusterrolebinding/rabbitmq-cluster-operator-openstack",
                                "clusterrole/system:auth-delegator",
                                "clusterrolebinding/rabbitmq-messaging-topology-operator-openstack",
                                "clusterrole/system:basic-user",
                                "clusterrolebinding/rbd-csi-nodeplugin",
                                "clusterrolebinding/rbd-csi-provisioner-role",
                                "clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient",
                                "clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient",
                                "clusterrolebinding/rook-ceph-global",
                                "clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver",
                                "clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver",
                                "clusterrolebinding/rook-ceph-mgr-cluster",
                                "clusterrole/system:certificates.k8s.io:kubelet-serving-approver",
                                "clusterrolebinding/rook-ceph-mgr-cluster-openstack",
                                "clusterrole/system:certificates.k8s.io:legacy-unknown-approver",
                                "clusterrolebinding/rook-ceph-object-bucket",
                                "clusterrolebinding/rook-ceph-osd",
                                "clusterrole/system:controller:attachdetach-controller",
                                "clusterrole/system:controller:certificate-controller",
                                "clusterrolebinding/rook-ceph-osd-openstack",
                                "clusterrole/system:controller:clusterrole-aggregation-controller",
                                "clusterrolebinding/rook-ceph-system",
                                "clusterrole/system:controller:cronjob-controller",
                                "clusterrolebinding/secretgen-controller-cluster-role-binding",
                                "clusterrole/system:controller:daemon-set-controller",
                                "clusterrole/system:controller:deployment-controller",
                                "clusterrolebinding/system:basic-user",
                                "clusterrole/system:controller:disruption-controller",
                                "clusterrolebinding/system:controller:attachdetach-controller",
                                "clusterrolebinding/system:controller:certificate-controller",
                                "clusterrolebinding/system:controller:clusterrole-aggregation-controller",
                                "clusterrole/system:controller:endpoint-controller",
                                "clusterrolebinding/system:controller:cronjob-controller",
                                "clusterrole/system:controller:endpointslice-controller",
                                "clusterrolebinding/system:controller:daemon-set-controller",
                                "clusterrole/system:controller:endpointslicemirroring-controller",
                                "clusterrolebinding/system:controller:deployment-controller",
                                "clusterrole/system:controller:ephemeral-volume-controller",
                                "clusterrole/system:controller:expand-controller",
                                "clusterrolebinding/system:controller:disruption-controller",
                                "clusterrole/system:controller:generic-garbage-collector",
                                "clusterrolebinding/system:controller:endpoint-controller",
                                "clusterrole/system:controller:horizontal-pod-autoscaler",
                                "clusterrolebinding/system:controller:endpointslice-controller",
                                "clusterrole/system:controller:job-controller",
                                "clusterrole/system:controller:namespace-controller",
                                "clusterrolebinding/system:controller:endpointslicemirroring-controller",
                                "clusterrolebinding/system:controller:ephemeral-volume-controller",
                                "clusterrole/system:controller:node-controller",
                                "clusterrolebinding/system:controller:expand-controller",
                                "clusterrole/system:controller:persistent-volume-binder",
                                "clusterrolebinding/system:controller:generic-garbage-collector",
                                "clusterrole/system:controller:pod-garbage-collector",
                                "clusterrolebinding/system:controller:horizontal-pod-autoscaler",
                                "clusterrole/system:controller:pv-protection-controller",
                                "clusterrolebinding/system:controller:job-controller",
                                "clusterrole/system:controller:pvc-protection-controller",
                                "clusterrolebinding/system:controller:namespace-controller",
                                "clusterrole/system:controller:replicaset-controller",
                                "clusterrolebinding/system:controller:node-controller",
                                "clusterrole/system:controller:replication-controller",
                                "clusterrolebinding/system:controller:persistent-volume-binder",
                                "clusterrolebinding/system:controller:pod-garbage-collector",
                                "clusterrole/system:controller:resourcequota-controller",
                                "clusterrolebinding/system:controller:pv-protection-controller",
                                "clusterrole/system:controller:root-ca-cert-publisher",
                                "clusterrolebinding/system:controller:pvc-protection-controller",
                                "clusterrole/system:controller:route-controller",
                                "clusterrolebinding/system:controller:replicaset-controller",
                                "clusterrole/system:controller:service-account-controller",
                                "clusterrolebinding/system:controller:replication-controller",
                                "clusterrole/system:controller:service-controller",
                                "clusterrolebinding/system:controller:resourcequota-controller",
                                "clusterrole/system:controller:statefulset-controller",
                                "clusterrole/system:controller:ttl-after-finished-controller",
                                "clusterrolebinding/system:controller:root-ca-cert-publisher",
                                "clusterrole/system:controller:ttl-controller",
                                "clusterrolebinding/system:controller:route-controller",
                                "clusterrole/system:coredns",
                                "clusterrolebinding/system:controller:service-account-controller",
                                "clusterrole/system:discovery",
                                "clusterrolebinding/system:controller:service-controller",
                                "clusterrole/system:heapster",
                                "clusterrolebinding/system:controller:statefulset-controller",
                                "clusterrolebinding/system:controller:ttl-after-finished-controller",
                                "clusterrole/system:kube-aggregator",
                                "clusterrolebinding/system:controller:ttl-controller",
                                "clusterrole/system:kube-controller-manager",
                                "clusterrolebinding/system:coredns",
                                "clusterrolebinding/system:discovery",
                                "clusterrolebinding/system:kube-controller-manager",
                                "clusterrolebinding/system:kube-dns",
                                "clusterrole/system:kube-dns",
                                "clusterrolebinding/system:kube-scheduler",
                                "clusterrole/system:kube-scheduler",
                                "clusterrolebinding/system:monitoring",
                                "clusterrole/system:kubelet-api-admin",
                                "clusterrole/system:monitoring",
                                "clusterrolebinding/system:node",
                                "clusterrole/system:node",
                                "clusterrolebinding/system:node-proxier",
                                "clusterrole/system:node-bootstrapper",
                                "clusterrolebinding/system:public-info-viewer",
                                "clusterrole/system:node-problem-detector",
                                "clusterrolebinding/system:service-account-issuer-discovery",
                                "clusterrole/system:node-proxier",
                                "clusterrolebinding/system:volume-scheduler",
                                "clusterrole/system:persistent-volume-provisioner",
                                "clusterrolebinding/vector",
                                "clusterrole/system:public-info-viewer",
                                "clusterrole/system:service-account-issuer-discovery",
                                "clusterrole/system:volume-scheduler",
                                "clusterrole/vector",
                                "clusterrole/view",
                                "storageclass/general",
                                "namespace/auth-system",
                                "namespace/capi-kubeadm-bootstrap-system",
                                "namespace/capi-kubeadm-control-plane-system",
                                "namespace/capi-system",
                                "namespace/capo-system",
                                "namespace/cert-manager",
                                "namespace/default",
                                "namespace/ingress-nginx",
                                "namespace/kube-node-lease",
                                "namespace/kube-public",
                                "namespace/kube-system",
                                "namespace/local-path-storage",
                                "namespace/magnum-system",
                                "namespace/monitoring",
                                "namespace/openstack",
                                "namespace/orc-system",
                                "namespace/rook-ceph",
                                "namespace/secretgen-controller"
                            ],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000001a-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:39.203296Z",
                            "start": "2026-03-01T22:25:14.651388Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000001a",
                        "name": "Gathering descriptions for cluster scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/objects/namespaced",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/objects/namespaced",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/objects/namespaced",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/objects/namespaced",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:25:39.416007Z",
                            "start": "2026-03-01T22:25:39.209384Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000001b",
                        "name": "creating directory for namespace scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nexport OBJECT_TYPE=configmaps,cronjobs,daemonsets,deployment,endpoints,ingresses,jobs,networkpolicies,pods,podsecuritypolicies,persistentvolumeclaims,rolebindings,roles,secrets,serviceaccounts,services,statefulsets\nexport PARALLELISM_FACTOR=2\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\n\nfunction list_namespaced_objects () {\n  export NAMESPACE=$1\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} $@\"' _ {}\n}\nexport -f list_namespaced_objects\n\nfunction name_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  kubectl get -n ${NAMESPACE} ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} ${OBJECT} $@\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  export NAME=${input[2]#*/}\n  echo \"${NAMESPACE}/${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/namespaced/${NAMESPACE}/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get -n ${NAMESPACE} ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe -n ${NAMESPACE} ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nget_namespaces |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'list_namespaced_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                            "delta": "0:03:16.677730",
                            "end": "2026-03-01 22:28:56.298630",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nexport OBJECT_TYPE=configmaps,cronjobs,daemonsets,deployment,endpoints,ingresses,jobs,networkpolicies,pods,podsecuritypolicies,persistentvolumeclaims,rolebindings,roles,secrets,serviceaccounts,services,statefulsets\nexport PARALLELISM_FACTOR=2\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\n\nfunction list_namespaced_objects () {\n  export NAMESPACE=$1\n  printf ${OBJECT_TYPE} | xargs -d ',' -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} $@\"' _ {}\n}\nexport -f list_namespaced_objects\n\nfunction name_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  kubectl get -n ${NAMESPACE} ${OBJECT} -o name | xargs -L1 -I {} -P1 -n1 bash -c 'echo \"${NAMESPACE} ${OBJECT} $@\"' _ {}\n}\nexport -f name_objects\n\nfunction get_objects () {\n  input=($1)\n  export NAMESPACE=${input[0]}\n  export OBJECT=${input[1]}\n  export NAME=${input[2]#*/}\n  echo \"${NAMESPACE}/${OBJECT}/${NAME}\"\n  DIR=\"/tmp/logs/objects/namespaced/${NAMESPACE}/${OBJECT}\"\n  mkdir -p ${DIR}\n  kubectl get -n ${NAMESPACE} ${OBJECT} ${NAME} -o yaml > \"${DIR}/${NAME}.yaml\"\n  kubectl describe -n ${NAMESPACE} ${OBJECT} ${NAME} > \"${DIR}/${NAME}.txt\"\n}\nexport -f get_objects\n\nget_namespaces |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'list_namespaced_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'name_objects \"$@\"' _ {} |  xargs -r -n 1 -P ${PARALLELISM_FACTOR} -I {} bash -c 'get_objects \"$@\"' _ {}",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000001c-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 22:25:39.620900",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value\nxargs: xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nwarning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/configmaps/keycloak-env-vars\nauth-system/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/deployment/capi-kubeadm-bootstrap-controller-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/endpoints/keycloak\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually 
exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/endpoints/keycloak-headless\nauth-system/endpoints/keycloak-metrics\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/endpoints/capi-kubeadm-bootstrap-webhook-service\nauth-system/ingresses/keycloak\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nauth-system/networkpolicies/keycloak\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/pods/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/pods/keycloak-0\nxargs: warning: options --max-lines and 
--replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/rolebindings/capi-kubeadm-bootstrap-leader-election-rolebinding\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/roles/capi-kubeadm-bootstrap-leader-election-role\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/secrets/capi-kubeadm-bootstrap-webhook-service-cert\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/secrets/keycloak\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/secrets/keycloak-externaldb\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/secrets/keycloak.199-204-45-156.nip.io-tls\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nauth-system/secrets/sh.helm.release.v1.keycloak.v1\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/serviceaccounts/capi-kubeadm-bootstrap-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/serviceaccounts/keycloak\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-bootstrap-system/services/capi-kubeadm-bootstrap-webhook-service\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/services/keycloak\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/services/keycloak-headless\nxargs: warning: options --max-lines and --replace/-I/-i are 
mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nauth-system/services/keycloak-metrics\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nauth-system/statefulsets/keycloak\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/deployment/capi-kubeadm-control-plane-controller-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/deployment/capi-controller-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually 
exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/endpoints/capi-kubeadm-control-plane-webhook-service\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/endpoints/capi-webhook-service\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/pods/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/pods/capi-controller-manager-bc4cf8c95-w8p6b\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/rolebindings/capi-kubeadm-control-plane-leader-election-rolebinding\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/roles/capi-kubeadm-control-plane-leader-election-role\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\ncapi-kubeadm-control-plane-system/secrets/capi-kubeadm-control-plane-webhook-service-cert\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/rolebindings/capi-leader-election-rolebinding\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/roles/capi-leader-election-role\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/serviceaccounts/capi-kubeadm-control-plane-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-kubeadm-control-plane-system/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/secrets/capi-webhook-service-cert\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/serviceaccounts/capi-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, 
ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/serviceaccounts/default\ncapi-kubeadm-control-plane-system/services/capi-kubeadm-control-plane-webhook-service\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapi-system/services/capi-webhook-service\ncapo-system/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/configmaps/cert-manager-webhook\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\ncapo-system/deployment/capo-controller-manager\ncert-manager/deployment/cert-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/deployment/cert-manager-cainjector\ncert-manager/deployment/cert-manager-webhook\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapo-system/endpoints/capo-webhook-service\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/endpoints/cert-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/endpoints/cert-manager-webhook\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\ncapo-system/pods/capo-controller-manager-6975759b4b-tkxrs\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/pods/cert-manager-75c4c745bc-45s4r\ncert-manager/pods/cert-manager-cainjector-64b59ddb75-tl5x7\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\ncert-manager/pods/cert-manager-webhook-548949fc64-vkrlt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapo-system/rolebindings/capo-leader-election-rolebinding\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapo-system/roles/capo-leader-election-role\ncapo-system/secrets/capo-webhook-service-cert\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually 
exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/rolebindings/cert-manager-cainjector:leaderelection\ncert-manager/rolebindings/cert-manager-webhook:dynamic-serving\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/rolebindings/cert-manager:leaderelection\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapo-system/serviceaccounts/capo-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapo-system/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/roles/cert-manager-cainjector:leaderelection\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/roles/cert-manager-webhook:dynamic-serving\nxargs: warning: options --max-lines and 
--replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/roles/cert-manager:leaderelection\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncapo-system/services/capo-webhook-service\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/secrets/cert-manager-selfsigned-ca\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/secrets/cert-manager-webhook-ca\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\ncert-manager/secrets/kube-prometheus-stack-ca\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/secrets/sh.helm.release.v1.cert-manager.v1\ncert-manager/services/cert-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/serviceaccounts/cert-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/services/cert-manager-webhook\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/serviceaccounts/cert-manager-cainjector\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/serviceaccounts/cert-manager-webhook\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ncert-manager/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ndefault/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: 
options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/configmaps/ingress-nginx-controller\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/configmaps/ingress-nginx-tcp\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/configmaps/ingress-nginx-udp\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ndefault/endpoints/kubernetes\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/daemonsets/ingress-nginx-controller\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/deployment/ingress-nginx-defaultbackend\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/endpoints/ingress-nginx-controller\nerror: 
the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/endpoints/ingress-nginx-controller-admission\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/endpoints/ingress-nginx-controller-metrics\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/endpoints/ingress-nginx-defaultbackend\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/pods/ingress-nginx-controller-j4bqv\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/pods/ingress-nginx-defaultbackend-6987ff55cf-gpx4l\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/rolebindings/ingress-nginx\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\ningress-nginx/roles/ingress-nginx\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/secrets/ingress-nginx-admission\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/secrets/sh.helm.release.v1.ingress-nginx.v1\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/serviceaccounts/ingress-nginx\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/serviceaccounts/ingress-nginx-backend\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ndefault/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\ningress-nginx/services/ingress-nginx-controller\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/services/ingress-nginx-controller-admission\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/services/ingress-nginx-controller-metrics\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ningress-nginx/services/ingress-nginx-defaultbackend\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\ndefault/services/kubernetes\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-node-lease/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/configmaps/cluster-info\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-node-lease/serviceaccounts/default\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous 
--max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo\nkube-public/rolebindings/system:controller:bootstrap-signer\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/roles/kubeadm:bootstrap-signer-clusterinfo\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/roles/system:controller:bootstrap-signer\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/cilium-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/coredns\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/extension-apiserver-authentication\nkube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nkube-system/configmaps/kube-proxy\nkube-system/configmaps/kube-root-ca.crt\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/kubeadm-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/configmaps/kubelet-config\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-public/serviceaccounts/default\nkube-system/daemonsets/cilium\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/daemonsets/kube-proxy\nkube-system/deployment/cilium-operator\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/deployment/coredns\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/endpoints/kube-dns\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nkube-system/endpoints/kube-prometheus-stack-coredns\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/endpoints/kube-prometheus-stack-kube-controller-manager\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/endpoints/kube-prometheus-stack-kube-etcd\nkube-system/endpoints/kube-prometheus-stack-kube-proxy\nkube-system/endpoints/kube-prometheus-stack-kube-scheduler\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/endpoints/kube-prometheus-stack-kubelet\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nlocal-path-storage/configmaps/kube-root-ca.crt\nlocal-path-storage/configmaps/local-path-config\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/pods/cilium-operator-869df985b8-kszk2\nkube-system/pods/cilium-vdz4f\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/pods/coredns-67659f764b-6f2mm\nkube-system/pods/coredns-67659f764b-j6fp4\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/pods/etcd-instance\nkube-system/pods/kube-apiserver-instance\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nerror: the server doesn't have a resource type 
\"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nkube-system/pods/kube-controller-manager-instance\nkube-system/pods/kube-proxy-sp2vs\nkube-system/pods/kube-scheduler-instance\nkube-system/pods/kube-vip-instance\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nlocal-path-storage/deployment/local-path-provisioner\nkube-system/rolebindings/cilium-config-agent\nkube-system/rolebindings/kube-proxy\nkube-system/rolebindings/kubeadm:kubeadm-certs\nkube-system/rolebindings/kubeadm:kubelet-config\nkube-system/rolebindings/kubeadm:nodes-kubeadm-config\nkube-system/rolebindings/system::extension-apiserver-authentication-reader\nkube-system/rolebindings/system::leader-locking-kube-controller-manager\nkube-system/rolebindings/system::leader-locking-kube-scheduler\nkube-system/rolebindings/system:controller:bootstrap-signer\nkube-system/rolebindings/system:controller:cloud-provider\nkube-system/rolebindings/system:controller:token-cleaner\nkube-system/roles/cilium-config-agent\nkube-system/roles/extension-apiserver-authentication-reader\nkube-system/roles/kube-proxy\nkube-system/roles/kubeadm:kubeadm-certs\nkube-system/roles/kubeadm:kubelet-config\nkube-system/roles/kubeadm:nodes-kubeadm-config\nkube-system/roles/system::leader-locking-kube-controller-manager\nkube-system/roles/system::leader-locking-kube-scheduler\nkube-system/roles/system:controller:bootstrap-signer\nkube-system/roles/system:controller:cloud-provider\nkube-system/roles/system:controller:token-cleaner\nlocal-path-storage/pods/local-path-provisioner-679c578f5-7h8w5\nkube-system/secrets/bootstrap-token-592olp\nkube-system/secrets/bootstrap-token-vahv4y\nkube-system/secrets/kubeadm-certs\nkube-system/secrets/sh.helm.release.v1.cilium.v1\nkube-system/serviceaccounts/attachdetach-controller\nkube-system/serviceaccounts/bootstrap-signer\nkube-system/servicea
ccounts/certificate-controller\nkube-system/serviceaccounts/cilium\nkube-system/serviceaccounts/cilium-operator\nkube-system/serviceaccounts/clusterrole-aggregation-controller\nkube-system/serviceaccounts/coredns\nkube-system/serviceaccounts/cronjob-controller\nkube-system/serviceaccounts/daemon-set-controller\nkube-system/serviceaccounts/default\nkube-system/serviceaccounts/deployment-controller\nkube-system/serviceaccounts/disruption-controller\nkube-system/serviceaccounts/endpoint-controller\nkube-system/serviceaccounts/endpointslice-controller\nkube-system/serviceaccounts/endpointslicemirroring-controller\nkube-system/serviceaccounts/ephemeral-volume-controller\nkube-system/serviceaccounts/expand-controller\nkube-system/serviceaccounts/generic-garbage-collector\nkube-system/serviceaccounts/horizontal-pod-autoscaler\nkube-system/serviceaccounts/job-controller\nkube-system/serviceaccounts/kube-proxy\nkube-system/serviceaccounts/namespace-controller\nkube-system/serviceaccounts/node-controller\nkube-system/serviceaccounts/persistent-volume-binder\nkube-system/serviceaccounts/pod-garbage-collector\nkube-system/serviceaccounts/pv-protection-controller\nkube-system/serviceaccounts/pvc-protection-controller\nkube-system/serviceaccounts/replicaset-controller\nkube-system/serviceaccounts/replication-controller\nkube-system/serviceaccounts/resourcequota-controller\nkube-system/serviceaccounts/root-ca-cert-publisher\nkube-system/serviceaccounts/service-account-controller\nkube-system/serviceaccounts/service-controller\nkube-system/serviceaccounts/statefulset-controller\nkube-system/serviceaccounts/token-cleaner\nkube-system/serviceaccounts/ttl-after-finished-controller\nkube-system/serviceaccounts/ttl-controller\nkube-system/services/kube-dns\nkube-system/services/kube-prometheus-stack-coredns\nkube-system/services/kube-prometheus-stack-kube-controller-manager\nkube-system/services/kube-prometheus-stack-kube-etcd\nkube-system/services/kube-prometheus-stack-kube-proxy\nkube
-system/services/kube-prometheus-stack-kube-scheduler\nkube-system/services/kube-prometheus-stack-kubelet\nmagnum-system/configmaps/kube-root-ca.crt\nlocal-path-storage/secrets/sh.helm.release.v1.local-path-provisioner.v1\nlocal-path-storage/serviceaccounts/default\nlocal-path-storage/serviceaccounts/local-path-provisioner\nmonitoring/configmaps/goldpinger-zap\nmonitoring/configmaps/ipmi-exporter\nmonitoring/configmaps/kube-prometheus-stack-alertmanager-overview\nmonitoring/configmaps/kube-prometheus-stack-apiserver\nmonitoring/configmaps/kube-prometheus-stack-cluster-total\nmonitoring/configmaps/kube-prometheus-stack-controller-manager\nmonitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster\nmonitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster-advanced\nmonitoring/configmaps/kube-prometheus-stack-dashboard-goldpinger\nmonitoring/configmaps/kube-prometheus-stack-dashboard-haproxy\nmonitoring/configmaps/kube-prometheus-stack-dashboard-host-details\nmonitoring/configmaps/kube-prometheus-stack-dashboard-hosts-overview\nmonitoring/configmaps/kube-prometheus-stack-dashboard-node-exporter-full\nmonitoring/configmaps/kube-prometheus-stack-dashboard-osd-device-details\nmonitoring/configmaps/kube-prometheus-stack-dashboard-osds-overview\nmonitoring/configmaps/kube-prometheus-stack-dashboard-pool-detail\nmonitoring/configmaps/kube-prometheus-stack-dashboard-pool-overview\nmonitoring/configmaps/kube-prometheus-stack-dashboard-rbd-details\nmonitoring/configmaps/kube-prometheus-stack-dashboard-rbd-overview\nmonitoring/configmaps/kube-prometheus-stack-etcd\nmonitoring/configmaps/kube-prometheus-stack-grafana\nmonitoring/configmaps/kube-prometheus-stack-grafana-config-dashboards\nmonitoring/configmaps/kube-prometheus-stack-grafana-datasource\nmonitoring/configmaps/kube-prometheus-stack-grafana-overview\nmonitoring/configmaps/kube-prometheus-stack-k8s-coredns\nmonitoring/configmaps/kube-prometheus-stack-k8s-resources-cluster\nmonitoring/configmaps/kube-pro
metheus-stack-k8s-resources-multicluster\nmonitoring/configmaps/kube-prometheus-stack-k8s-resources-namespace\nmonitoring/configmaps/kube-prometheus-stack-k8s-resources-node\nmonitoring/configmaps/kube-prometheus-stack-k8s-resources-pod\nmonitoring/configmaps/kube-prometheus-stack-k8s-resources-workload\nmonitoring/configmaps/kube-prometheus-stack-k8s-resources-workloads-namespace\nmonitoring/configmaps/kube-prometheus-stack-kubelet\nmonitoring/configmaps/kube-prometheus-stack-namespace-by-pod\nmonitoring/configmaps/kube-prometheus-stack-namespace-by-workload\nmonitoring/configmaps/kube-prometheus-stack-node-cluster-rsrc-use\nmonitoring/configmaps/kube-prometheus-stack-node-exporter\nmonitoring/configmaps/kube-prometheus-stack-node-rsrc-use\nmonitoring/configmaps/kube-prometheus-stack-nodes\nmonitoring/configmaps/kube-prometheus-stack-nodes-darwin\nmonitoring/configmaps/kube-prometheus-stack-persistentvolumesusage\nmonitoring/configmaps/kube-prometheus-stack-pod-total\nmonitoring/configmaps/kube-prometheus-stack-prometheus\nmonitoring/daemonsets/goldpinger\nmonitoring/configmaps/kube-prometheus-stack-prometheus-tls\nmonitoring/daemonsets/ipmi-exporter\nmonitoring/configmaps/kube-prometheus-stack-proxy\nmonitoring/daemonsets/kube-prometheus-stack-prometheus-node-exporter\nmonitoring/configmaps/kube-prometheus-stack-scheduler\nmonitoring/daemonsets/node-feature-discovery-worker\nmonitoring/configmaps/kube-prometheus-stack-workload-total\nmonitoring/daemonsets/vector\nmonitoring/configmaps/kube-root-ca.crt\nmonitoring/configmaps/loki\nmonitoring/configmaps/loki-alerting-rules\nmonitoring/configmaps/loki-gateway\nmonitoring/configmaps/loki-runtime\nmonitoring/configmaps/node-feature-discovery-master-conf\nmonitoring/configmaps/node-feature-discovery-topology-updater-conf\nmonitoring/configmaps/node-feature-discovery-worker-conf\nmonitoring/configmaps/prometheus-kube-prometheus-stack-prometheus-rulefiles-0\nmonitoring/configmaps/vector\nmonitoring/deployment/kube-prometh
eus-stack-grafana\nmonitoring/deployment/kube-prometheus-stack-kube-state-metrics\nmonitoring/deployment/kube-prometheus-stack-operator\nmonitoring/deployment/loki-gateway\nmonitoring/deployment/node-feature-discovery-gc\nmonitoring/deployment/node-feature-discovery-master\nmonitoring/deployment/prometheus-pushgateway\nmonitoring/endpoints/alertmanager-operated\nmonitoring/endpoints/goldpinger\nmonitoring/endpoints/kube-prometheus-stack-alertmanager\nmonitoring/endpoints/kube-prometheus-stack-grafana\nmonitoring/endpoints/kube-prometheus-stack-kube-state-metrics\nmonitoring/endpoints/kube-prometheus-stack-operator\nmonitoring/endpoints/kube-prometheus-stack-prometheus\nmonitoring/endpoints/kube-prometheus-stack-prometheus-node-exporter\nmonitoring/endpoints/loki\nmonitoring/endpoints/loki-chunks-cache\nmonitoring/endpoints/loki-gateway\nmonitoring/endpoints/loki-headless\nmonitoring/endpoints/loki-memberlist\nmonitoring/endpoints/loki-results-cache\nmonitoring/endpoints/prometheus-operated\nmonitoring/endpoints/prometheus-pushgateway\nmonitoring/endpoints/vector-headless\nmonitoring/ingresses/kube-prometheus-stack-alertmanager\nmonitoring/ingresses/kube-prometheus-stack-grafana\nmonitoring/ingresses/kube-prometheus-stack-prometheus\nmagnum-system/serviceaccounts/default\nmonitoring/pods/alertmanager-kube-prometheus-stack-alertmanager-0\nmonitoring/pods/goldpinger-7jzp8\nmonitoring/pods/kube-prometheus-stack-grafana-668bfb9659-ft52b\nmonitoring/pods/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m\nmonitoring/pods/kube-prometheus-stack-operator-cd88cf4bf-lzh7g\nmonitoring/pods/kube-prometheus-stack-prometheus-node-exporter-59qlm\nmonitoring/pods/loki-0\nmonitoring/pods/loki-chunks-cache-0\nmonitoring/pods/loki-gateway-cf54cb88c-zv654\nmonitoring/pods/loki-results-cache-0\nmonitoring/pods/node-feature-discovery-gc-6675cbb6d9-zv9sn\nmonitoring/pods/node-feature-discovery-master-8665476dbc-t4z5z\nmonitoring/pods/node-feature-discovery-worker-p8lmk\nmonitoring/p
ods/prometheus-kube-prometheus-stack-prometheus-0\nmonitoring/pods/prometheus-pushgateway-7b8659c68b-28dht\nmonitoring/pods/vector-qzjms\nmonitoring/persistentvolumeclaims/alertmanager-kube-prometheus-stack-alertmanager-db-alertmanager-kube-prometheus-stack-alertmanager-0\nmonitoring/persistentvolumeclaims/prometheus-kube-prometheus-stack-prometheus-db-prometheus-kube-prometheus-stack-prometheus-0\nmonitoring/persistentvolumeclaims/storage-loki-0\nmonitoring/rolebindings/kube-prometheus-stack-grafana\nmonitoring/rolebindings/kube-prometheus-stack-pod-tls-sidecar\nmonitoring/rolebindings/node-feature-discovery-worker\nmonitoring/roles/kube-prometheus-stack-grafana\nmonitoring/roles/kube-prometheus-stack-pod-tls-sidecar\nmonitoring/roles/node-feature-discovery-worker\nmonitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager\nmonitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-generated\nmonitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-tls-assets-0\nmonitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-web-config\nmonitoring/secrets/alertmanager-tls\nmonitoring/secrets/grafana-tls\nmonitoring/secrets/kube-prometheus-stack-admission\nmonitoring/secrets/kube-prometheus-stack-alertmanager-client-secret\nmonitoring/secrets/kube-prometheus-stack-alertmanager-cookie-secret\nmonitoring/secrets/kube-prometheus-stack-alertmanager-oauth2-proxy\nmonitoring/secrets/kube-prometheus-stack-etcd-client-cert\nmonitoring/secrets/kube-prometheus-stack-grafana\nmonitoring/secrets/kube-prometheus-stack-grafana-client-secret\nmonitoring/secrets/kube-prometheus-stack-prometheus-client-secret\nmonitoring/secrets/kube-prometheus-stack-prometheus-cookie-secret\nmonitoring/secrets/kube-prometheus-stack-prometheus-node-exporter-59qlm-tls\nmonitoring/secrets/kube-prometheus-stack-prometheus-oauth2-proxy\nmonitoring/secrets/prometheus-kube-prometheus-stack-prometheus\nmonitoring/secrets/prometheus-kube-prometheus-stack-prometheus-0-tls\nmonito
ring/secrets/prometheus-kube-prometheus-stack-prometheus-tls-assets-0\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/configmaps/barbican-bin\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nmonitoring/secrets/prometheus-kube-prometheus-stack-prometheus-web-config\nopenstack/configmaps/ceph-csi-config\nmonitoring/secrets/prometheus-tls\nmonitoring/secrets/sh.helm.release.v1.goldpinger.v1\nopenstack/configmaps/ceph-etc\nmonitoring/secrets/sh.helm.release.v1.kube-prometheus-stack.v1\nopenstack/configmaps/cinder-bin\nmonitoring/secrets/sh.helm.release.v1.loki.v1\nopenstack/configmaps/glance-bin\nmonitoring/secrets/sh.helm.release.v1.node-feature-discovery.v1\nopenstack/configmaps/heat-bin\nmonitoring/secrets/sh.helm.release.v1.prometheus-pushgateway.v1\nopenstack/configmaps/horizon-bin\nmonitoring/secrets/sh.helm.release.v1.vector.v1\nopenstack/configmaps/keepalived-bin\nopenstack/configmaps/keystone-bin\nopenstack/configmaps/keystone-openid-metadata\nopenstack/configmaps/kube-root-ca.crt\nopenstack/configmaps/libvirt-bin\nopenstack/configmaps/libvirt-libvirt-default-bin\nopenstack/configmaps/magnum-bin\nopenstack/configmaps/magnum-cluster-api-proxy-config\nopenstack/configmaps/manila-bin\nopenstack/configmaps/memcached-memcached-bin\nopenstack/configmaps/neutron-bin\nopenstack/configmaps/nova-bin\nopenstack/configmaps/octavia-bin\nopenstack/configmaps/openvswitch-bin\nopenstack/configmaps/ovn-bin\nopenstack/configmaps/percona-xtradb-haproxy\nopenstack/configmaps/percona-xtradb-pxc\nopenstack/configmaps/placement-bin\nopenstack/configmaps/rabbitmq-barbican-plugins-conf\nopenstack/configmaps/rabbitmq-barbican-server-conf\nopenstack/configmaps/rabbitmq-cinder-plugins-conf\nopenstack/configmaps/rabbitmq-cinder-server-conf\nopenstack/configmaps/rabbitmq-glance-plugins-conf\nopenstack/configmaps/rabbitmq-glance-server-conf\nope
nstack/configmaps/rabbitmq-heat-plugins-conf\nopenstack/configmaps/rabbitmq-heat-server-conf\nopenstack/configmaps/rabbitmq-keystone-plugins-conf\nopenstack/configmaps/rabbitmq-keystone-server-conf\nopenstack/configmaps/rabbitmq-magnum-plugins-conf\nopenstack/configmaps/rabbitmq-magnum-server-conf\nopenstack/configmaps/rabbitmq-manila-plugins-conf\nopenstack/configmaps/rabbitmq-manila-server-conf\nopenstack/configmaps/rabbitmq-neutron-plugins-conf\nopenstack/configmaps/rabbitmq-neutron-server-conf\nopenstack/configmaps/rabbitmq-nova-plugins-conf\nopenstack/configmaps/rabbitmq-nova-server-conf\nopenstack/configmaps/rabbitmq-octavia-plugins-conf\nopenstack/configmaps/rabbitmq-octavia-server-conf\nopenstack/configmaps/rook-ceph-mon-endpoints\nopenstack/configmaps/rook-ceph-pdbstatemap\nopenstack/configmaps/rook-ceph-rgw-ceph-mime-types\nopenstack/configmaps/rook-config-override\nopenstack/configmaps/staffeln-bin\nopenstack/configmaps/tempest-bin\nmonitoring/serviceaccounts/default\nopenstack/configmaps/valkey-configuration\nmonitoring/serviceaccounts/goldpinger\nopenstack/configmaps/valkey-health\nopenstack/configmaps/valkey-scripts\nmonitoring/serviceaccounts/kube-prometheus-stack-alertmanager\nmonitoring/serviceaccounts/kube-prometheus-stack-grafana\nmonitoring/serviceaccounts/kube-prometheus-stack-kube-state-metrics\nmonitoring/serviceaccounts/kube-prometheus-stack-operator\nmonitoring/serviceaccounts/kube-prometheus-stack-prometheus\nmonitoring/serviceaccounts/kube-prometheus-stack-prometheus-node-exporter\nmonitoring/serviceaccounts/loki\nmonitoring/serviceaccounts/node-feature-discovery\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nmonitoring/serviceaccounts/node-feature-discovery-gc\nmonitoring/serviceaccounts/node-feature-discovery-worker\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nmonitoring/serviceaccounts/prometheus-pushgateway\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nmonitoring/serviceaccounts/vector\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/cronjobs/cinder-volume-usage-audit\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/cronjobs/heat-engine-cleaner\nopenstack/cronjobs/heat-purge-deleted\nopenstack/cronjobs/keystone-credential-rotate\nopenstack/cronjobs/keystone-fernet-rotate\nopenstack/cronjobs/nova-cell-setup\nopenstack/cronjobs/nova-service-cleaner\nmonitoring/services/alertmanager-operated\nmonitoring/services/goldpinger\nmonitoring/services/kube-prometheus-stack-alertmanager\nmonitoring/services/kube-prometheus-stack-grafana\nmonitoring/services/kube-prometheus-stack-kube-state-metrics\nmonitoring/services/kube-prometheus-stack-operator\nmonitoring/services/kube-prometheus-stack-prometheus\nmonitoring/services/kube-prometheus-stack-prometheus-node-exporter\nmonitoring/services/loki\nmonitoring/services/loki-chunks-cache\nmonitoring/services/loki-gateway\nmonitoring/services/loki-headless\nmonitoring/services/loki-memberlist\nmonitoring/services/loki-results-cache\nmonitoring/services/prometheus-operated\nmonitoring/services/prometheus-pushgateway\nmonitoring/services/vector-headless\nmonitoring/statefulsets/alertmanager-kube-prometheus-stack-alertmanager\nmonitoring/statefulsets/loki\nmonitoring/statefulsets/loki-chunks-cache\nmonitoring/statefulsets/loki-results-cache\nmonitoring/statefulsets/prometheus-kube-prometheus-stack-prometheus\nopenstack/daemonsets/keepalived\nopenstack/daemonsets/libvirt-libvirt-default\nopenstack/daemonsets/magnum-cluster-api-proxy\nopenstack/daemonsets/neutron-netns-cleanup-cron-default\nopenstack/daemonsets/neutron-ovn-metadata-agent-default\nopenstack/daem
onsets/nova-compute-default\nopenstack/daemonsets/octavia-health-manager-default\nopenstack/daemonsets/openvswitch\nopenstack/daemonsets/ovn-controller\nopenstack/deployment/barbican-api\nopenstack/deployment/cinder-api\nopenstack/deployment/cinder-backup\nopenstack/deployment/cinder-scheduler\norc-system/configmaps/kube-root-ca.crt\nopenstack/deployment/cinder-volume\nopenstack/deployment/glance-api\nopenstack/deployment/heat-api\nopenstack/deployment/heat-cfn\nopenstack/deployment/heat-engine\nopenstack/deployment/horizon\nopenstack/deployment/keystone-api\nopenstack/deployment/magnum-api\nopenstack/deployment/magnum-registry\nopenstack/deployment/manila-api\nopenstack/deployment/manila-data\nopenstack/deployment/manila-scheduler\nopenstack/deployment/manila-share\nopenstack/deployment/memcached-memcached\nopenstack/deployment/neutron-server\nopenstack/deployment/nova-api-metadata\nopenstack/deployment/nova-api-osapi\nopenstack/deployment/nova-conductor\nopenstack/deployment/nova-novncproxy\nopenstack/deployment/nova-scheduler\nopenstack/deployment/octavia-api\nopenstack/deployment/octavia-housekeeping\nopenstack/deployment/octavia-worker\nopenstack/deployment/openstack-database-exporter\nopenstack/deployment/openstack-exporter\nopenstack/deployment/ovn-northd\nopenstack/deployment/placement-api\nopenstack/deployment/pxc-operator\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/deployment/rabbitmq-cluster-operator\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/deployment/rabbitmq-messaging-topology-operator\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually 
exclusive, ignoring previous --max-lines value\nopenstack/deployment/rook-ceph-crashcollector-instance\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/deployment/rook-ceph-rgw-ceph-a\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/deployment/staffeln-api\nopenstack/deployment/staffeln-conductor\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/barbican-api\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/ceph-mon\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/cinder-api\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/glance-api\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/heat-api\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/heat-cfn\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/horizon\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines 
value\nopenstack/endpoints/horizon-int\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/keystone-api\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/magnum-api\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/magnum-registry\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/manila-api\nerror: the server doesn't have a resource type \"podsecuritypolicies\"\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/memcached\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/memcached-metrics\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/neutron-server\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/nova-api\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/nova-metadata\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/nova-novncproxy\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/octavia-api\nxargs: warning: options --max-lines 
and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/openstack-exporter\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/ovn-ovsdb-nb\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/ovn-ovsdb-sb\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/percona-xtradb-cluster-operator\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/percona-xtradb-haproxy\nxargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value\nopenstack/endpoints/percona-xtradb-haproxy-metrics\nopenstack/endpoints/percona-xtradb-haproxy-replicas\nopenstack/endpoints/percona-xtradb-pxc\nopenstack/endpoints/percona-xtradb-pxc-unready\nopenstack/endpoints/placement-api\nopenstack/endpoints/rabbitmq-barbican\nopenstack/endpoints/rabbitmq-barbican-nodes\nopenstack/endpoints/rabbitmq-cinder\nopenstack/endpoints/rabbitmq-cinder-nodes\nopenstack/endpoints/rabbitmq-glance\nopenstack/endpoints/rabbitmq-glance-nodes\nopenstack/endpoints/rabbitmq-heat\nopenstack/endpoints/rabbitmq-heat-nodes\nopenstack/endpoints/rabbitmq-keystone\nopenstack/endpoints/rabbitmq-keystone-nodes\nopenstack/endpoints/rabbitmq-magnum\nopenstack/ingresses/cloudformation\nopenstack/endpoints/rabbitmq-magnum-nodes\nopenstack/ingresses/compute\nopenstack/endpoints/rabbitmq-manila\nopenstack/ingresses/compute-novnc-proxy\nopenstack/endpoints/rabbitmq-manila-nodes\nopenstack/ingresses/container-infra\nopenstack/ingresses/container-infra-registry\nopenstack/endpoints/rabbitmq-messaging-topology-operator-webhook\nopenstack/ingresses/dashboard\nopenstack/endpoints/rabbitmq-neutron\nopenstack/ing
resses/identity\nopenstack/endpoints/rabbitmq-neutron-nodes\nopenstack/ingresses/image\nopenstack/endpoints/rabbitmq-nova\nopenstack/ingresses/key-manager\nopenstack/endpoints/rabbitmq-nova-nodes\nopenstack/endpoints/rabbitmq-octavia\nopenstack/ingresses/load-balancer\nopenstack/endpoints/rabbitmq-octavia-nodes\nopenstack/ingresses/network\nopenstack/ingresses/orchestration\nopenstack/endpoints/rook-ceph-rgw-ceph\nopenstack/endpoints/staffeln-api\nopenstack/ingresses/placement\nopenstack/endpoints/valkey\nopenstack/ingresses/rook-ceph-cluster\nopenstack/ingresses/sharev2\nopenstack/endpoints/valkey-headless\nopenstack/ingresses/volumev3\nopenstack/endpoints/valkey-metrics\norc-system/endpoints/orc-controller-manager-metrics-service\norc-system/deployment/orc-controller-manager\nopenstack/networkpolicies/rabbitmq-cluster-operator\nopenstack/networkpolicies/rabbitmq-messaging-topology-operator\nopenstack/networkpolicies/valkey\nopenstack/jobs/barbican-db-init\nopenstack/jobs/barbican-db-sync\nopenstack/jobs/barbican-ks-endpoints\nopenstack/jobs/barbican-ks-service\nopenstack/jobs/barbican-ks-user\nopenstack/jobs/barbican-rabbit-init\nopenstack/jobs/cinder-backup-storage-init\nopenstack/jobs/cinder-bootstrap\nopenstack/jobs/cinder-create-internal-tenant\nopenstack/jobs/cinder-db-init\nopenstack/jobs/cinder-db-sync\nopenstack/jobs/cinder-ks-endpoints\nopenstack/jobs/cinder-ks-service\nopenstack/jobs/cinder-ks-user\nopenstack/jobs/cinder-rabbit-init\nopenstack/jobs/cinder-storage-init\nopenstack/jobs/cinder-volume-usage-audit-29540045\nopenstack/jobs/glance-db-init\nopenstack/jobs/glance-db-sync\nopenstack/jobs/glance-ks-endpoints\nopenstack/jobs/glance-ks-service\nopenstack/jobs/glance-ks-user\nopenstack/jobs/glance-metadefs-load\nopenstack/jobs/glance-rabbit-init\nopenstack/jobs/glance-storage-init\nopenstack/jobs/heat-bootstrap\nopenstack/jobs/heat-db-init\nopenstack/jobs/heat-db-sync\nopenstack/jobs/heat-domain-ks-user\nopenstack/jobs/heat-engine-cleaner-29540055\nop
enstack/jobs/heat-engine-cleaner-29540060\nopenstack/jobs/heat-engine-cleaner-29540065\nopenstack/jobs/heat-ks-endpoints\nopenstack/jobs/heat-ks-service\nopenstack/jobs/heat-ks-user\nopenstack/jobs/heat-rabbit-init\nopenstack/jobs/heat-trusts\nopenstack/jobs/horizon-db-init\nopenstack/jobs/horizon-db-sync\nopenstack/jobs/keystone-bootstrap\nopenstack/jobs/keystone-credential-setup\nopenstack/jobs/keystone-db-init\nopenstack/jobs/keystone-db-sync\nopenstack/jobs/keystone-domain-manage\nopenstack/jobs/keystone-fernet-setup\nopenstack/jobs/keystone-rabbit-init\nopenstack/jobs/magnum-db-init\nopenstack/jobs/magnum-db-sync\nopenstack/jobs/magnum-domain-ks-user\nopenstack/jobs/magnum-ks-endpoints\nopenstack/jobs/magnum-ks-service\nopenstack/jobs/magnum-ks-user\nopenstack/jobs/magnum-rabbit-init\nopenstack/jobs/manila-bootstrap\nopenstack/jobs/manila-db-init\nopenstack/jobs/manila-db-sync\nopenstack/jobs/manila-ks-endpoints\nopenstack/jobs/manila-ks-service\nopenstack/jobs/manila-ks-user\nopenstack/jobs/manila-rabbit-init\nopenstack/jobs/neutron-db-init\nopenstack/jobs/neutron-db-sync\nopenstack/jobs/neutron-ks-endpoints\nopenstack/jobs/neutron-ks-service\nopenstack/jobs/neutron-ks-user\nopenstack/jobs/neutron-rabbit-init\nopenstack/jobs/nova-cell-setup-29540040\nopenstack/jobs/nova-db-init\nopenstack/jobs/nova-db-sync\nopenstack/jobs/nova-ks-endpoints\nopenstack/jobs/nova-ks-service\nopenstack/jobs/nova-ks-user\nopenstack/jobs/nova-rabbit-init\nopenstack/jobs/nova-service-cleaner-29540040\nopenstack/jobs/octavia-bootstrap\nopenstack/jobs/octavia-db-init\nopenstack/jobs/octavia-db-sync\nopenstack/jobs/octavia-ks-endpoints\nopenstack/jobs/octavia-ks-service\nopenstack/jobs/octavia-ks-user\nopenstack/jobs/octavia-rabbit-init\nopenstack/jobs/placement-db-init\nopenstack/jobs/placement-db-sync\nopenstack/jobs/placement-ks-endpoints\nopenstack/jobs/placement-ks-service\nopenstack/jobs/placement-ks-user\nopenstack/jobs/staffeln-db-init\nopenstack/jobs/staffeln-db-sync\nopenstack
/jobs/tempest-ks-user\nopenstack/jobs/tempest-run-tests\nopenstack/pods/barbican-api-775987496d-z6jqv\nopenstack/pods/barbican-db-init-nm8k6\nopenstack/pods/barbican-db-sync-452x5\nopenstack/pods/barbican-ks-endpoints-w2ffg\nopenstack/pods/barbican-ks-service-8pm7j\nopenstack/pods/barbican-ks-user-fszfr\nopenstack/pods/barbican-rabbit-init-j5qmd\nopenstack/pods/cinder-api-86d7694f66-j97gj\nopenstack/pods/cinder-backup-dcfd7dfb7-sdwkc\nopenstack/pods/cinder-backup-storage-init-zmnkh\nopenstack/pods/cinder-bootstrap-wng86\nopenstack/pods/cinder-create-internal-tenant-6vgll\nopenstack/pods/cinder-db-init-mzm5b\nopenstack/pods/cinder-db-sync-mz6ls\nopenstack/pods/cinder-ks-endpoints-xv2tb\nopenstack/pods/cinder-ks-service-dlcxz\nopenstack/pods/cinder-ks-user-5bd5g\nopenstack/pods/cinder-rabbit-init-l4fpm\nopenstack/pods/cinder-scheduler-586f444995-p7grf\nopenstack/pods/cinder-storage-init-vt6br\nopenstack/pods/cinder-volume-66dc847979-qgp4l\nopenstack/pods/cinder-volume-usage-audit-29540045-jbmvh\nopenstack/pods/glance-api-65d579bfc8-6x76l\nopenstack/pods/glance-db-init-wbpff\nopenstack/pods/glance-db-sync-gk84f\nopenstack/pods/glance-ks-endpoints-dq2cc\nopenstack/pods/glance-ks-service-5h6bw\nopenstack/pods/glance-ks-user-lcfxr\nopenstack/pods/glance-metadefs-load-476tp\nopenstack/pods/glance-rabbit-init-c6rjt\nopenstack/pods/glance-storage-init-hdcpc\nopenstack/pods/heat-api-6d65f9477-kmbkl\nopenstack/pods/heat-bootstrap-9dwg2\nopenstack/pods/heat-cfn-f44db7787-t8f7m\nopenstack/pods/heat-db-init-fk8qw\nopenstack/pods/heat-db-sync-cxmcb\nopenstack/pods/heat-domain-ks-user-tq2c5\nopenstack/pods/heat-engine-64f8b77bfb-wngkr\nopenstack/pods/heat-engine-cleaner-29540055-p9pq9\nopenstack/pods/heat-engine-cleaner-29540060-z4g95\nopenstack/pods/heat-engine-cleaner-29540065-rcjr2\nopenstack/pods/heat-ks-endpoints-wwzbz\nopenstack/pods/heat-ks-service-8pxqz\nopenstack/pods/heat-ks-user-tfk98\nopenstack/pods/heat-rabbit-init-rbl9n\nopenstack/persistentvolumeclaims/data-ovn-ovsdb
-nb-0\nopenstack/pods/heat-trusts-czrrv\nopenstack/persistentvolumeclaims/data-ovn-ovsdb-sb-0\nopenstack/pods/horizon-8cdd7b888-bvzvx\nopenstack/persistentvolumeclaims/datadir-percona-xtradb-pxc-0\nopenstack/pods/horizon-db-init-s5pbw\nopenstack/persistentvolumeclaims/persistence-rabbitmq-barbican-server-0\nopenstack/pods/horizon-db-sync-bgr2g\nopenstack/persistentvolumeclaims/persistence-rabbitmq-cinder-server-0\nopenstack/pods/keepalived-7jdfz\nopenstack/persistentvolumeclaims/persistence-rabbitmq-glance-server-0\nopenstack/persistentvolumeclaims/persistence-rabbitmq-heat-server-0\nopenstack/pods/keystone-api-c4656754c-mqbxm\nopenstack/pods/keystone-bootstrap-mdtrx\nopenstack/persistentvolumeclaims/persistence-rabbitmq-keystone-server-0\nopenstack/pods/keystone-credential-setup-6xsvx\nopenstack/persistentvolumeclaims/persistence-rabbitmq-magnum-server-0\nopenstack/pods/keystone-db-init-z5mwz\nopenstack/persistentvolumeclaims/persistence-rabbitmq-manila-server-0\nopenstack/pods/keystone-db-sync-zsq8z\nopenstack/pods/keystone-domain-manage-v865d\nopenstack/persistentvolumeclaims/persistence-rabbitmq-neutron-server-0\nopenstack/pods/keystone-fernet-setup-5rfqs\nopenstack/persistentvolumeclaims/persistence-rabbitmq-nova-server-0\nopenstack/pods/keystone-rabbit-init-m44qz\nopenstack/pods/libvirt-libvirt-default-6bgrg\nopenstack/persistentvolumeclaims/persistence-rabbitmq-octavia-server-0\nopenstack/pods/magnum-api-8549df7884-9b2zc\nopenstack/persistentvolumeclaims/valkey-data-valkey-node-0\nopenstack/pods/magnum-cluster-api-proxy-z2flh\nopenstack/pods/magnum-conductor-0\nopenstack/pods/magnum-db-init-dshrc\nopenstack/pods/magnum-db-sync-8ttpk\nopenstack/pods/magnum-domain-ks-user-vp8f2\nopenstack/pods/magnum-ks-endpoints-jvzvf\nopenstack/pods/magnum-ks-service-vdn67\nopenstack/pods/magnum-ks-user-4wvtj\nopenstack/pods/magnum-rabbit-init-w7jc7\nopenstack/pods/magnum-registry-c45778976-2zz96\nopenstack/pods/manila-api-5cdf958bd9-hmbmb\nopenstack/pods/manila-bootstrap-5wn
97\nopenstack/pods/manila-data-75cbc955bd-27jjw\nopenstack/pods/manila-db-init-pbdm8\nopenstack/pods/manila-db-sync-rm9mz\nopenstack/pods/manila-ks-endpoints-d8nr9\nopenstack/pods/manila-ks-service-g7svt\nopenstack/pods/manila-ks-user-pr9mg\nopenstack/pods/manila-rabbit-init-74vjs\nopenstack/pods/manila-scheduler-5b584c8656-mmnnd\nopenstack/pods/manila-share-68879775b-rc6q9\nopenstack/pods/memcached-memcached-6479589586-9sxjx\nopenstack/pods/neutron-db-init-l7c9v\nopenstack/pods/neutron-db-sync-brwb5\nopenstack/pods/neutron-ks-endpoints-dstkg\nopenstack/pods/neutron-ks-service-sq4tp\nopenstack/pods/neutron-ks-user-kcfc4\nopenstack/pods/neutron-netns-cleanup-cron-default-8frwf\nopenstack/pods/neutron-ovn-metadata-agent-default-flhb5\nopenstack/pods/neutron-rabbit-init-rdnbf\nopenstack/pods/neutron-server-649c5974f6-5dkvl\nopenstack/pods/nova-api-metadata-546d94ddd7-btnrc\nopenstack/pods/nova-api-osapi-99c7b7cd8-2lnzr\nopenstack/pods/nova-bootstrap-trzqq\nopenstack/pods/nova-cell-setup-29540040-rtzd7\nopenstack/pods/nova-cell-setup-j97qh\nopenstack/pods/nova-compute-default-2v5pd\nopenstack/pods/nova-conductor-5474cb4b8d-bxzhq\nopenstack/pods/nova-db-init-b4sqh\nopenstack/pods/nova-db-sync-2rbjc\nopenstack/pods/nova-ks-endpoints-zwcm6\nopenstack/pods/nova-ks-service-fmj77\nopenstack/pods/nova-ks-user-t8xgz\nopenstack/pods/nova-novncproxy-85dd5b5965-z6hmj\nopenstack/pods/nova-rabbit-init-szpvx\nopenstack/pods/nova-scheduler-78775555d4-hb2j9\nopenstack/pods/nova-service-cleaner-29540040-cxdd4\nopenstack/pods/octavia-api-75db6578cf-m656r\nopenstack/pods/octavia-bootstrap-kwfv2\nopenstack/pods/octavia-db-init-wnz5h\nopenstack/pods/octavia-db-sync-rjq45\nopenstack/pods/octavia-health-manager-default-twmks\nopenstack/pods/octavia-housekeeping-87b98c47b-vqwct\nopenstack/pods/octavia-ks-endpoints-jdlzw\nopenstack/pods/octavia-ks-service-rkdp9\nopenstack/pods/octavia-ks-user-tjl52\nopenstack/pods/octavia-rabbit-init-vdqxf\nopenstack/pods/octavia-worker-774cddbcdc-qxl6k\nopenst
ack/pods/openstack-database-exporter-7c944bc9f-w2bdb\nopenstack/pods/openstack-exporter-74676fb4b4-jrkwh\nopenstack/pods/openvswitch-gj98d\nopenstack/pods/ovn-controller-6mbd4\nopenstack/pods/ovn-northd-6c6687ddd6-7grhs\nopenstack/pods/ovn-ovsdb-nb-0\nopenstack/pods/ovn-ovsdb-sb-0\nopenstack/pods/percona-xtradb-haproxy-0\nopenstack/pods/percona-xtradb-pxc-0\nopenstack/pods/placement-api-75695696c6-brsxj\nopenstack/pods/placement-db-init-89t92\nopenstack/pods/placement-db-sync-nvqjv\nopenstack/pods/placement-ks-endpoints-jmfl7\nopenstack/pods/placement-ks-service-qdjdz\nopenstack/pods/placement-ks-user-blkn9\nopenstack/pods/pxc-operator-7cff949c8b-7zp4j\nopenstack/pods/rabbitmq-barbican-server-0\nopenstack/rolebindings/barbican-barbican-api\nopenstack/pods/rabbitmq-cinder-server-0\nopenstack/rolebindings/barbican-barbican-db-init\nopenstack/pods/rabbitmq-cluster-operator-5448d56d95-vk9km\nopenstack/rolebindings/barbican-barbican-db-sync\nopenstack/pods/rabbitmq-glance-server-0\nopenstack/rolebindings/barbican-barbican-ks-endpoints\nopenstack/rolebindings/barbican-barbican-ks-service\nopenstack/pods/rabbitmq-heat-server-0\nopenstack/rolebindings/barbican-barbican-ks-user\nopenstack/pods/rabbitmq-keystone-server-0\nopenstack/rolebindings/barbican-barbican-rabbit-init\nopenstack/pods/rabbitmq-magnum-server-0\nopenstack/rolebindings/cinder-backup-storage-init\nopenstack/pods/rabbitmq-manila-server-0\nopenstack/pods/rabbitmq-messaging-topology-operator-7f8596f788-84l9x\nopenstack/rolebindings/cinder-cinder-api\nopenstack/pods/rabbitmq-neutron-server-0\nopenstack/rolebindings/cinder-cinder-backup\nopenstack/pods/rabbitmq-nova-server-0\nopenstack/pods/rabbitmq-octavia-server-0\nopenstack/rolebindings/cinder-cinder-bootstrap\nopenstack/pods/rook-ceph-crashcollector-instance-754c646bfd-htxl9\nopenstack/rolebindings/cinder-cinder-create-internal-tenant\nopenstack/pods/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw\nopenstack/rolebindings/cinder-cinder-db-init\nopenstack/pods/staffeln-a
pi-6669c8779f-qgp4c\nopenstack/rolebindings/cinder-cinder-db-sync\nopenstack/pods/staffeln-conductor-7b5d99bcd4-ws4sl\nopenstack/rolebindings/cinder-cinder-ks-endpoints\nopenstack/pods/staffeln-db-init-p4pq4\nopenstack/rolebindings/cinder-cinder-ks-service\nopenstack/pods/staffeln-db-sync-khzx8\nopenstack/pods/tempest-ks-user-kwbf6\nopenstack/rolebindings/cinder-cinder-ks-user\nopenstack/pods/tempest-run-tests-g5plh\nopenstack/rolebindings/cinder-cinder-rabbit-init\nopenstack/rolebindings/cinder-cinder-scheduler\nopenstack/pods/valkey-node-0\nopenstack/rolebindings/cinder-cinder-test\nopenstack/rolebindings/cinder-cinder-volume\nopenstack/rolebindings/cinder-cinder-volume-usage-audit\nopenstack/rolebindings/cinder-storage-init\nopenstack/rolebindings/glance-glance-api\nopenstack/rolebindings/glance-glance-db-init\nopenstack/rolebindings/glance-glance-db-sync\nopenstack/rolebindings/glance-glance-ks-endpoints\nopenstack/rolebindings/glance-glance-ks-service\nopenstack/rolebindings/glance-glance-ks-user\nopenstack/rolebindings/glance-glance-metadefs-load\nopenstack/rolebindings/glance-glance-rabbit-init\nopenstack/rolebindings/glance-glance-storage-init\nopenstack/rolebindings/glance-glance-test\nopenstack/rolebindings/glance-storage-init\nopenstack/rolebindings/heat-heat-api\nopenstack/rolebindings/heat-heat-bootstrap\nopenstack/rolebindings/heat-heat-cfn\nopenstack/rolebindings/heat-heat-db-init\nopenstack/rolebindings/heat-heat-db-sync\nopenstack/rolebindings/heat-heat-engine\nopenstack/rolebindings/heat-heat-engine-cleaner\nopenstack/rolebindings/heat-heat-ks-endpoints\nopenstack/rolebindings/heat-heat-ks-service\nopenstack/rolebindings/heat-heat-ks-user\nopenstack/rolebindings/heat-heat-ks-user-domain\nopenstack/rolebindings/heat-heat-purge-deleted\nopenstack/rolebindings/heat-heat-rabbit-init\nopenstack/rolebindings/heat-heat-test\nopenstack/rolebindings/heat-heat-trusts\nopenstack/rolebindings/horizon-horizon\nopenstack/rolebindings/horizon-horizon-db-init\nope
nstack/rolebindings/horizon-horizon-db-sync\nopenstack/rolebindings/horizon-horizon-test\nopenstack/rolebindings/keepalived\nopenstack/rolebindings/keystone-credential-rotate\nopenstack/rolebindings/keystone-credential-setup\nopenstack/rolebindings/keystone-fernet-rotate\nopenstack/rolebindings/keystone-fernet-setup\nopenstack/rolebindings/keystone-keystone-api\nopenstack/rolebindings/keystone-keystone-bootstrap\nopenstack/rolebindings/keystone-keystone-credential-rotate\nopenstack/rolebindings/keystone-keystone-db-init\nopenstack/rolebindings/keystone-keystone-db-sync\nopenstack/rolebindings/keystone-keystone-domain-manage\nopenstack/rolebindings/keystone-keystone-fernet-rotate\nopenstack/rolebindings/keystone-keystone-test\nopenstack/rolebindings/libvirt-cert-manager\nopenstack/rolebindings/libvirt-libvirt\nopenstack/rolebindings/magnum-magnum-api\nopenstack/rolebindings/magnum-magnum-conductor\nopenstack/rolebindings/magnum-magnum-db-init\nopenstack/rolebindings/magnum-magnum-db-sync\nopenstack/rolebindings/magnum-magnum-ks-endpoints\nopenstack/rolebindings/magnum-magnum-ks-service\nopenstack/rolebindings/magnum-magnum-ks-user\nopenstack/rolebindings/magnum-magnum-ks-user-domain\nopenstack/rolebindings/magnum-magnum-rabbit-init\nopenstack/rolebindings/manila-manila-api\nopenstack/rolebindings/manila-manila-data\nopenstack/rolebindings/manila-manila-db-init\nopenstack/rolebindings/manila-manila-db-sync\nopenstack/rolebindings/manila-manila-ks-endpoints\nopenstack/rolebindings/manila-manila-ks-service\nopenstack/rolebindings/manila-manila-ks-user\nopenstack/rolebindings/manila-manila-rabbit-init\nopenstack/rolebindings/manila-manila-scheduler\nopenstack/rolebindings/manila-manila-share\nopenstack/rolebindings/neutron-neutron-db-init\nopenstack/rolebindings/neutron-neutron-db-sync\nopenstack/rolebindings/neutron-neutron-ks-endpoints\nopenstack/rolebindings/neutron-neutron-ks-service\nopenstack/rolebindings/neutron-neutron-ks-user\nopenstack/rolebindings/neutron-neut
ron-ovn-metadata-agent\nopenstack/rolebindings/neutron-neutron-rabbit-init\nopenstack/rolebindings/neutron-neutron-server\nopenstack/rolebindings/neutron-neutron-test\nopenstack/rolebindings/nova-nova-api-metadata\nopenstack/rolebindings/nova-nova-api-osapi\nopenstack/rolebindings/nova-nova-bootstrap\norc-system/pods/orc-controller-manager-6cb597b5d4-glhcz\nopenstack/rolebindings/nova-nova-cell-setup\nopenstack/rolebindings/nova-nova-cell-setup-cron\nopenstack/rolebindings/nova-nova-compute\nopenstack/rolebindings/nova-nova-conductor\nopenstack/rolebindings/nova-nova-db-init\nopenstack/rolebindings/nova-nova-db-sync\nopenstack/rolebindings/nova-nova-ks-endpoints\nopenstack/rolebindings/nova-nova-ks-service\nopenstack/rolebindings/nova-nova-ks-user\nopenstack/rolebindings/nova-nova-novncproxy\nopenstack/rolebindings/nova-nova-rabbit-init\nopenstack/rolebindings/nova-nova-scheduler\nopenstack/rolebindings/nova-nova-service-cleaner\nopenstack/rolebindings/nova-nova-test\nopenstack/rolebindings/octavia-octavia-api\nopenstack/rolebindings/octavia-octavia-db-init\nopenstack/rolebindings/octavia-octavia-db-sync\nopenstack/rolebindings/octavia-octavia-health-manager\nopenstack/rolebindings/octavia-octavia-housekeeping\nopenstack/rolebindings/octavia-octavia-ks-endpoints\nopenstack/rolebindings/octavia-octavia-ks-service\nopenstack/rolebindings/octavia-octavia-ks-user\nopenstack/rolebindings/octavia-octavia-rabbit-init\nopenstack/rolebindings/octavia-octavia-worker\nopenstack/rolebindings/ovn-controller\nopenstack/rolebindings/ovn-northd\nopenstack/rolebindings/ovn-ovn-controller\nopenstack/rolebindings/ovn-ovn-northd\nopenstack/rolebindings/ovn-ovsdb\nopenstack/rolebindings/placement-placement-api\nopenstack/rolebindings/placement-placement-db-init\nopenstack/rolebindings/placement-placement-db-sync\nopenstack/rolebindings/placement-placement-ks-endpoints\nopenstack/rolebindings/placement-placement-ks-service\nopenstack/rolebindings/placement-placement-ks-user\nopenstack/ro
lebindings/pxc-operator\nopenstack/rolebindings/rabbitmq-barbican-server\nopenstack/rolebindings/rabbitmq-cinder-server\nopenstack/rolebindings/rabbitmq-cluster-operator\nopenstack/rolebindings/rabbitmq-glance-server\nopenstack/rolebindings/rabbitmq-heat-server\nopenstack/rolebindings/rabbitmq-keystone-server\nopenstack/rolebindings/rabbitmq-magnum-server\nopenstack/rolebindings/rabbitmq-manila-server\nopenstack/rolebindings/rabbitmq-messaging-topology-operator\nopenstack/rolebindings/rabbitmq-neutron-server\nopenstack/rolebindings/rabbitmq-nova-server\nopenstack/rolebindings/rabbitmq-octavia-server\nopenstack/rolebindings/rook-ceph-cluster-mgmt\nopenstack/rolebindings/rook-ceph-cmd-reporter\nopenstack/rolebindings/rook-ceph-mgr\nopenstack/rolebindings/rook-ceph-osd\nopenstack/rolebindings/rook-ceph-purge-osd\nopenstack/rolebindings/staffeln-conductor-leases\nopenstack/rolebindings/staffeln-staffeln-api\nopenstack/rolebindings/staffeln-staffeln-conductor\nopenstack/rolebindings/staffeln-staffeln-db-init\nopenstack/rolebindings/staffeln-staffeln-db-sync\nopenstack/rolebindings/tempest-tempest-ks-user\nopenstack/rolebindings/tempest-tempest-run-tests\nopenstack/roles/barbican-openstack-barbican-api\nopenstack/roles/barbican-openstack-barbican-db-init\nopenstack/roles/barbican-openstack-barbican-db-sync\nopenstack/roles/barbican-openstack-barbican-ks-endpoints\nopenstack/roles/barbican-openstack-barbican-ks-service\nopenstack/roles/barbican-openstack-barbican-ks-user\nopenstack/roles/barbican-openstack-barbican-rabbit-init\nopenstack/roles/cinder-backup-storage-init\nopenstack/roles/cinder-openstack-cinder-api\nopenstack/roles/cinder-openstack-cinder-backup\nopenstack/roles/cinder-openstack-cinder-bootstrap\nopenstack/roles/cinder-openstack-cinder-create-internal-tenant\nopenstack/roles/cinder-openstack-cinder-db-init\nopenstack/roles/cinder-openstack-cinder-db-sync\nopenstack/roles/cinder-openstack-cinder-ks-endpoints\nopenstack/roles/cinder-openstack-cinder-ks-servic
e\nopenstack/roles/cinder-openstack-cinder-ks-user\nopenstack/roles/cinder-openstack-cinder-rabbit-init\nopenstack/roles/cinder-openstack-cinder-scheduler\nopenstack/roles/cinder-openstack-cinder-test\nopenstack/roles/cinder-openstack-cinder-volume\nopenstack/roles/cinder-openstack-cinder-volume-usage-audit\nopenstack/roles/cinder-storage-init\nopenstack/roles/glance-openstack-glance-api\nopenstack/roles/glance-openstack-glance-db-init\nopenstack/roles/glance-openstack-glance-db-sync\nopenstack/roles/glance-openstack-glance-ks-endpoints\nopenstack/roles/glance-openstack-glance-ks-service\nopenstack/roles/glance-openstack-glance-ks-user\nopenstack/roles/glance-openstack-glance-metadefs-load\nopenstack/roles/glance-openstack-glance-rabbit-init\nopenstack/roles/glance-openstack-glance-storage-init\nopenstack/roles/glance-openstack-glance-test\nopenstack/roles/glance-storage-init\nopenstack/roles/heat-openstack-heat-api\nopenstack/roles/heat-openstack-heat-bootstrap\nopenstack/roles/heat-openstack-heat-cfn\nopenstack/roles/heat-openstack-heat-db-init\nopenstack/roles/heat-openstack-heat-db-sync\nopenstack/roles/heat-openstack-heat-engine\nopenstack/roles/heat-openstack-heat-engine-cleaner\nopenstack/roles/heat-openstack-heat-ks-endpoints\nopenstack/roles/heat-openstack-heat-ks-service\nopenstack/roles/heat-openstack-heat-ks-user\nopenstack/roles/heat-openstack-heat-ks-user-domain\nopenstack/roles/heat-openstack-heat-purge-deleted\nopenstack/roles/heat-openstack-heat-rabbit-init\nopenstack/roles/heat-openstack-heat-test\nopenstack/roles/heat-openstack-heat-trusts\nopenstack/roles/horizon-openstack-horizon\nopenstack/roles/horizon-openstack-horizon-db-init\nopenstack/roles/horizon-openstack-horizon-db-sync\nopenstack/roles/horizon-openstack-horizon-test\nopenstack/roles/keepalived\nopenstack/roles/keystone-credential-rotate\nopenstack/roles/keystone-credential-setup\nopenstack/roles/keystone-fernet-rotate\nopenstack/roles/keystone-fernet-setup\nopenstack/roles/keystone-op
enstack-keystone-api\nopenstack/roles/keystone-openstack-keystone-bootstrap\nopenstack/roles/keystone-openstack-keystone-credential-rotate\nopenstack/roles/keystone-openstack-keystone-db-init\nopenstack/roles/keystone-openstack-keystone-db-sync\nopenstack/roles/keystone-openstack-keystone-domain-manage\nopenstack/roles/keystone-openstack-keystone-fernet-rotate\nopenstack/roles/keystone-openstack-keystone-test\nopenstack/roles/libvirt-cert-manager\nopenstack/roles/libvirt-openstack-libvirt\nopenstack/roles/magnum-openstack-magnum-api\nopenstack/roles/magnum-openstack-magnum-conductor\nopenstack/roles/magnum-openstack-magnum-db-init\nopenstack/roles/magnum-openstack-magnum-db-sync\nopenstack/roles/magnum-openstack-magnum-ks-endpoints\nopenstack/roles/magnum-openstack-magnum-ks-service\nopenstack/roles/magnum-openstack-magnum-ks-user\nopenstack/roles/magnum-openstack-magnum-ks-user-domain\nopenstack/roles/magnum-openstack-magnum-rabbit-init\nopenstack/roles/manila-openstack-manila-api\nopenstack/roles/manila-openstack-manila-data\nopenstack/roles/manila-openstack-manila-db-init\nopenstack/roles/manila-openstack-manila-db-sync\nopenstack/roles/manila-openstack-manila-ks-endpoints\nopenstack/roles/manila-openstack-manila-ks-service\nopenstack/roles/manila-openstack-manila-ks-user\nopenstack/roles/manila-openstack-manila-rabbit-init\nopenstack/roles/manila-openstack-manila-scheduler\nopenstack/roles/manila-openstack-manila-share\nopenstack/roles/neutron-openstack-neutron-db-init\nopenstack/roles/neutron-openstack-neutron-db-sync\nopenstack/roles/neutron-openstack-neutron-ks-endpoints\nopenstack/roles/neutron-openstack-neutron-ks-service\nopenstack/roles/neutron-openstack-neutron-ks-user\nopenstack/roles/neutron-openstack-neutron-ovn-metadata-agent\nopenstack/roles/neutron-openstack-neutron-rabbit-init\nopenstack/roles/neutron-openstack-neutron-server\nopenstack/roles/neutron-openstack-neutron-test\nopenstack/roles/nova-openstack-nova-api-metadata\nopenstack/roles/nova-ope
nstack-nova-api-osapi\nopenstack/roles/nova-openstack-nova-bootstrap\nopenstack/roles/nova-openstack-nova-cell-setup\nopenstack/roles/nova-openstack-nova-cell-setup-cron\nopenstack/roles/nova-openstack-nova-compute\nopenstack/roles/nova-openstack-nova-conductor\nopenstack/roles/nova-openstack-nova-db-init\nopenstack/roles/nova-openstack-nova-db-sync\nopenstack/roles/nova-openstack-nova-ks-endpoints\nopenstack/roles/nova-openstack-nova-ks-service\nopenstack/roles/nova-openstack-nova-ks-user\nopenstack/roles/nova-openstack-nova-novncproxy\nopenstack/roles/nova-openstack-nova-rabbit-init\nopenstack/roles/nova-openstack-nova-scheduler\nopenstack/roles/nova-openstack-nova-service-cleaner\nopenstack/roles/nova-openstack-nova-test\nopenstack/roles/octavia-openstack-octavia-api\nopenstack/roles/octavia-openstack-octavia-db-init\nopenstack/roles/octavia-openstack-octavia-db-sync\nopenstack/roles/octavia-openstack-octavia-health-manager\nopenstack/roles/octavia-openstack-octavia-housekeeping\nopenstack/roles/octavia-openstack-octavia-ks-endpoints\nopenstack/roles/octavia-openstack-octavia-ks-service\nopenstack/roles/octavia-openstack-octavia-ks-user\nopenstack/roles/octavia-openstack-octavia-rabbit-init\nopenstack/roles/octavia-openstack-octavia-worker\nopenstack/roles/ovn-controller\nopenstack/roles/ovn-northd\nopenstack/roles/ovn-openstack-ovn-controller\nopenstack/roles/ovn-openstack-ovn-northd\nopenstack/roles/ovn-ovsdb\nopenstack/roles/placement-openstack-placement-api\nopenstack/roles/placement-openstack-placement-db-init\nopenstack/roles/placement-openstack-placement-db-sync\nopenstack/roles/placement-openstack-placement-ks-endpoints\nopenstack/roles/placement-openstack-placement-ks-service\nopenstack/roles/placement-openstack-placement-ks-user\nopenstack/roles/pxc-operator\nopenstack/roles/rabbitmq-barbican-peer-discovery\nopenstack/roles/rabbitmq-cinder-peer-discovery\nopenstack/roles/rabbitmq-cluster-operator\nopenstack/roles/rabbitmq-glance-peer-discovery\nopenstac
k/roles/rabbitmq-heat-peer-discovery\nopenstack/roles/rabbitmq-keystone-peer-discovery\nopenstack/roles/rabbitmq-magnum-peer-discovery\nopenstack/roles/rabbitmq-manila-peer-discovery\nopenstack/roles/rabbitmq-messaging-topology-operator\nopenstack/roles/rabbitmq-neutron-peer-discovery\nopenstack/roles/rabbitmq-nova-peer-discovery\nopenstack/roles/rabbitmq-octavia-peer-discovery\nopenstack/roles/rook-ceph-cmd-reporter\nopenstack/roles/rook-ceph-mgr\nopenstack/roles/rook-ceph-osd\nopenstack/roles/rook-ceph-purge-osd\nopenstack/roles/staffeln-conductor\nopenstack/roles/staffeln-openstack-staffeln-api\nopenstack/roles/staffeln-openstack-staffeln-conductor\nopenstack/roles/staffeln-openstack-staffeln-db-init\nopenstack/roles/staffeln-openstack-staffeln-db-sync\nopenstack/roles/tempest-openstack-tempest-ks-user\nopenstack/roles/tempest-openstack-tempest-run-tests\nopenstack/secrets/barbican-api-certs\nopenstack/secrets/barbican-db-admin\nopenstack/secrets/barbican-db-user\nopenstack/secrets/barbican-etc\nopenstack/secrets/barbican-keystone-admin\nopenstack/secrets/barbican-keystone-user\nopenstack/secrets/barbican-rabbitmq-admin\nopenstack/secrets/barbican-rabbitmq-user\nopenstack/secrets/cinder-api-certs\nopenstack/secrets/cinder-backup-rbd-keyring\nopenstack/secrets/cinder-db-admin\nopenstack/secrets/cinder-db-user\nopenstack/secrets/cinder-etc\nopenstack/secrets/cinder-keystone-admin\nopenstack/secrets/cinder-keystone-test\nopenstack/serviceaccounts/barbican-api\nopenstack/secrets/cinder-keystone-user\nopenstack/serviceaccounts/barbican-db-init\nopenstack/secrets/cinder-rabbitmq-admin\nopenstack/serviceaccounts/barbican-db-sync\nopenstack/secrets/cinder-rabbitmq-user\nopenstack/serviceaccounts/barbican-ks-endpoints\nopenstack/secrets/cinder-volume-rbd-keyring\nopenstack/secrets/glance-api-certs\nopenstack/serviceaccounts/barbican-ks-service\nopenstack/secrets/glance-db-admin\nopenstack/serviceaccounts/barbican-ks-user\nopenstack/secrets/glance-db-user\nopenstack/servic
eaccounts/barbican-rabbit-init\nopenstack/secrets/glance-etc\nopenstack/serviceaccounts/barbican-test\nopenstack/secrets/glance-keystone-admin\nopenstack/serviceaccounts/cinder-api\nopenstack/secrets/glance-keystone-test\nopenstack/serviceaccounts/cinder-backup\nopenstack/secrets/glance-keystone-user\nopenstack/serviceaccounts/cinder-backup-storage-init\nopenstack/secrets/glance-rabbitmq-admin\nopenstack/serviceaccounts/cinder-bootstrap\nopenstack/secrets/glance-rabbitmq-user\nopenstack/serviceaccounts/cinder-create-internal-tenant\nopenstack/secrets/heat-api-certs\nopenstack/secrets/heat-cfn-certs\nopenstack/serviceaccounts/cinder-db-init\nopenstack/serviceaccounts/cinder-db-sync\nopenstack/serviceaccounts/cinder-ks-endpoints\nopenstack/serviceaccounts/cinder-ks-service\nopenstack/serviceaccounts/cinder-ks-user\nopenstack/serviceaccounts/cinder-rabbit-init\nopenstack/serviceaccounts/cinder-scheduler\nopenstack/serviceaccounts/cinder-storage-init\nopenstack/serviceaccounts/cinder-test\nopenstack/serviceaccounts/cinder-volume\nopenstack/serviceaccounts/cinder-volume-usage-audit\nopenstack/serviceaccounts/default\nopenstack/serviceaccounts/glance-api\nopenstack/serviceaccounts/glance-db-init\nopenstack/serviceaccounts/glance-db-sync\nopenstack/serviceaccounts/glance-ks-endpoints\nopenstack/serviceaccounts/glance-ks-service\nopenstack/serviceaccounts/glance-ks-user\nopenstack/serviceaccounts/glance-metadefs-load\nopenstack/serviceaccounts/glance-rabbit-init\nopenstack/serviceaccounts/glance-storage-init\nopenstack/serviceaccounts/glance-test\nopenstack/serviceaccounts/heat-api\nopenstack/serviceaccounts/heat-bootstrap\nopenstack/serviceaccounts/heat-cfn\nopenstack/serviceaccounts/heat-db-init\nopenstack/serviceaccounts/heat-db-sync\nopenstack/serviceaccounts/heat-engine\nopenstack/serviceaccounts/heat-engine-cleaner\nopenstack/serviceaccounts/heat-ks-endpoints\nopenstack/serviceaccounts/heat-ks-service\nopenstack/serviceaccounts/heat-ks-user\nopenstack/serviceaccounts/
heat-ks-user-domain\nopenstack/serviceaccounts/heat-purge-deleted\nopenstack/serviceaccounts/heat-rabbit-init\nopenstack/serviceaccounts/heat-test\nopenstack/serviceaccounts/heat-trusts\nopenstack/serviceaccounts/horizon\nopenstack/serviceaccounts/horizon-db-init\nopenstack/serviceaccounts/horizon-db-sync\nopenstack/serviceaccounts/horizon-test\nopenstack/serviceaccounts/keepalived\nopenstack/serviceaccounts/keystone-api\nopenstack/serviceaccounts/keystone-bootstrap\nopenstack/serviceaccounts/keystone-credential-rotate\nopenstack/serviceaccounts/keystone-credential-setup\nopenstack/serviceaccounts/keystone-db-init\nopenstack/serviceaccounts/keystone-db-sync\nopenstack/serviceaccounts/keystone-domain-manage\nopenstack/serviceaccounts/keystone-fernet-rotate\nopenstack/serviceaccounts/keystone-fernet-setup\nopenstack/serviceaccounts/keystone-rabbit-init\nopenstack/serviceaccounts/keystone-test\nopenstack/serviceaccounts/libvirt\nopenstack/serviceaccounts/magnum-api\nopenstack/serviceaccounts/magnum-conductor\nopenstack/serviceaccounts/magnum-db-init\nopenstack/serviceaccounts/magnum-db-sync\nopenstack/serviceaccounts/magnum-ks-endpoints\nopenstack/serviceaccounts/magnum-ks-service\nopenstack/serviceaccounts/magnum-ks-user\nopenstack/serviceaccounts/magnum-ks-user-domain\nopenstack/serviceaccounts/magnum-rabbit-init\nopenstack/serviceaccounts/manila-api\nopenstack/serviceaccounts/manila-bootstrap\nopenstack/serviceaccounts/manila-data\nopenstack/serviceaccounts/manila-db-init\nopenstack/serviceaccounts/manila-db-sync\nopenstack/serviceaccounts/manila-ks-endpoints\nopenstack/serviceaccounts/manila-ks-service\nopenstack/serviceaccounts/manila-ks-user\nopenstack/secrets/heat-db-admin\nopenstack/secrets/heat-db-user\nopenstack/secrets/heat-etc\nopenstack/secrets/heat-keystone-admin\nopenstack/secrets/heat-keystone-stack-user\nopenstack/secrets/heat-keystone-test\nopenstack/secrets/heat-keystone-trustee\nopenstack/secrets/heat-keystone-user\nopenstack/secrets/heat-rabbitmq-a
dmin\nopenstack/secrets/heat-rabbitmq-user\nopenstack/secrets/horizon-db-admin\nopenstack/secrets/horizon-db-user\nopenstack/secrets/horizon-etc\nopenstack/secrets/horizon-int-certs\nopenstack/secrets/horizon-keystone-admin\nopenstack/secrets/images-rbd-keyring\nopenstack/secrets/internal-percona-xtradb\nopenstack/secrets/keepalived-etc\nopenstack/secrets/keystone-api-certs\nopenstack/secrets/keystone-credential-keys\nopenstack/secrets/keystone-db-admin\nopenstack/secrets/keystone-db-user\nopenstack/secrets/keystone-etc\nopenstack/secrets/keystone-fernet-keys\nopenstack/secrets/keystone-keystone-admin\nopenstack/secrets/keystone-keystone-test\nopenstack/secrets/keystone-rabbitmq-admin\nopenstack/secrets/keystone-rabbitmq-user\nopenstack/secrets/libvirt-api-ca\nopenstack/secrets/libvirt-etc\nopenstack/secrets/libvirt-libvirt-default-6bgrg-api\nopenstack/secrets/libvirt-libvirt-default-6bgrg-vnc\nopenstack/secrets/libvirt-libvirt-default-etc\nopenstack/secrets/libvirt-vnc-ca\nopenstack/secrets/magnum-api-certs\nopenstack/secrets/magnum-db-admin\nopenstack/secrets/magnum-db-user\nopenstack/secrets/magnum-etc\nopenstack/secrets/magnum-keystone-admin\nopenstack/secrets/magnum-keystone-stack-user\nopenstack/secrets/magnum-keystone-user\nopenstack/secrets/magnum-rabbitmq-admin\nopenstack/secrets/magnum-rabbitmq-user\nopenstack/secrets/magnum-registry-certs\nopenstack/secrets/manila-api-certs\nopenstack/secrets/manila-db-admin\nopenstack/secrets/manila-db-user\nopenstack/secrets/manila-etc\nopenstack/secrets/manila-keystone-admin\nopenstack/secrets/manila-keystone-user\nopenstack/secrets/manila-rabbitmq-admin\nopenstack/secrets/manila-rabbitmq-user\nopenstack/secrets/manila-ssh-keys\nopenstack/secrets/neutron-db-admin\nopenstack/secrets/neutron-db-user\nopenstack/secrets/neutron-etc\nopenstack/secrets/neutron-keystone-admin\nopenstack/secrets/neutron-keystone-test\nopenstack/secrets/neutron-keystone-user\nopenstack/secrets/neutron-netns-cleanup-cron-default\nopenstack/secre
ts/neutron-ovn-metadata-agent-default\nopenstack/secrets/neutron-rabbitmq-admin\nopenstack/secrets/neutron-rabbitmq-user\nopenstack/secrets/neutron-server-certs\nopenstack/secrets/nova-api-certs\nopenstack/secrets/nova-compute-default\nopenstack/secrets/nova-db-admin\nopenstack/secrets/nova-db-api-admin\nopenstack/secrets/nova-db-api-user\nopenstack/secrets/nova-db-cell0-admin\nopenstack/secrets/nova-db-cell0-user\nopenstack/secrets/nova-db-user\nopenstack/secrets/nova-etc\nopenstack/secrets/nova-keystone-admin\nopenstack/secrets/nova-keystone-test\nopenstack/secrets/nova-keystone-user\nopenstack/secrets/nova-novncproxy-certs\nopenstack/secrets/nova-novncproxy-vencrypt\nopenstack/secrets/nova-rabbitmq-admin\nopenstack/secrets/nova-rabbitmq-user\nopenstack/secrets/nova-ssh\nopenstack/secrets/octavia-amphora-ssh-key\nopenstack/secrets/octavia-api-certs\nopenstack/secrets/octavia-client-ca\nopenstack/secrets/octavia-client-certs\nopenstack/secrets/octavia-db-admin\nopenstack/secrets/octavia-db-user\nopenstack/secrets/octavia-etc\nopenstack/secrets/octavia-health-manager-default\nopenstack/secrets/octavia-keystone-admin\nopenstack/secrets/octavia-keystone-test\nopenstack/secrets/octavia-keystone-user\nopenstack/serviceaccounts/manila-rabbit-init\nopenstack/serviceaccounts/manila-scheduler\nopenstack/serviceaccounts/manila-share\nopenstack/serviceaccounts/memcached-memcached\nopenstack/serviceaccounts/neutron-db-init\nopenstack/serviceaccounts/neutron-db-sync\nopenstack/serviceaccounts/neutron-ks-endpoints\nopenstack/serviceaccounts/neutron-ks-service\nopenstack/serviceaccounts/neutron-ks-user\nopenstack/serviceaccounts/neutron-netns-cleanup-cron\nopenstack/serviceaccounts/neutron-ovn-metadata-agent\nopenstack/serviceaccounts/neutron-rabbit-init\nopenstack/serviceaccounts/neutron-server\nopenstack/serviceaccounts/neutron-test\nopenstack/serviceaccounts/nova-api-metadata\nopenstack/serviceaccounts/nova-api-osapi\nopenstack/serviceaccounts/nova-bootstrap\nopenstack/service
accounts/nova-cell-setup\nopenstack/serviceaccounts/nova-cell-setup-cron\nopenstack/serviceaccounts/nova-compute\nopenstack/serviceaccounts/nova-conductor\nopenstack/serviceaccounts/nova-db-init\nopenstack/serviceaccounts/nova-db-sync\nopenstack/serviceaccounts/nova-ks-endpoints\nopenstack/serviceaccounts/nova-ks-service\nopenstack/serviceaccounts/nova-ks-user\nopenstack/serviceaccounts/nova-novncproxy\nopenstack/serviceaccounts/nova-rabbit-init\nopenstack/serviceaccounts/nova-scheduler\nopenstack/serviceaccounts/nova-service-cleaner\nopenstack/serviceaccounts/nova-test\nopenstack/serviceaccounts/octavia-api\nopenstack/serviceaccounts/octavia-bootstrap\nopenstack/serviceaccounts/octavia-db-init\nopenstack/serviceaccounts/octavia-db-sync\nopenstack/serviceaccounts/octavia-health-manager\nopenstack/serviceaccounts/octavia-housekeeping\nopenstack/serviceaccounts/octavia-ks-endpoints\nopenstack/serviceaccounts/octavia-ks-service\nopenstack/serviceaccounts/octavia-ks-user\nopenstack/serviceaccounts/octavia-rabbit-init\nopenstack/serviceaccounts/octavia-worker\nopenstack/serviceaccounts/openvswitch-server\nopenstack/serviceaccounts/ovn-controller\nopenstack/serviceaccounts/ovn-northd\nopenstack/serviceaccounts/ovn-ovsdb-nb\nopenstack/serviceaccounts/ovn-ovsdb-sb\nopenstack/serviceaccounts/placement-api\nopenstack/serviceaccounts/placement-db-init\nopenstack/serviceaccounts/placement-db-sync\nopenstack/serviceaccounts/placement-ks-endpoints\nopenstack/serviceaccounts/placement-ks-service\nopenstack/serviceaccounts/placement-ks-user\nopenstack/serviceaccounts/pxc-operator\nopenstack/serviceaccounts/rabbitmq-barbican-server\nopenstack/serviceaccounts/rabbitmq-cinder-server\nopenstack/serviceaccounts/rabbitmq-cluster-operator\nopenstack/serviceaccounts/rabbitmq-glance-server\nopenstack/serviceaccounts/rabbitmq-heat-server\nopenstack/serviceaccounts/rabbitmq-keystone-server\nopenstack/serviceaccounts/rabbitmq-magnum-server\nopenstack/serviceaccounts/rabbitmq-manila-server\nope
nstack/serviceaccounts/rabbitmq-messaging-topology-operator\nopenstack/serviceaccounts/rabbitmq-neutron-server\nopenstack/serviceaccounts/rabbitmq-nova-server\nopenstack/serviceaccounts/rabbitmq-octavia-server\nopenstack/serviceaccounts/rook-ceph-cmd-reporter\nopenstack/serviceaccounts/rook-ceph-default\nopenstack/serviceaccounts/rook-ceph-mgr\nopenstack/secrets/octavia-persistence-db-admin\nopenstack/secrets/octavia-persistence-db-user\nopenstack/secrets/octavia-rabbitmq-admin\nopenstack/secrets/octavia-rabbitmq-user\nopenstack/secrets/octavia-server-ca\nopenstack/secrets/openstack-database-exporter-dsn\nopenstack/secrets/ovn-etc\nopenstack/secrets/ovn-vector-config\nopenstack/secrets/percona-xtradb\nopenstack/secrets/placement-api-certs\nopenstack/secrets/placement-db-admin\nopenstack/secrets/placement-db-user\nopenstack/secrets/placement-etc\nopenstack/secrets/placement-keystone-admin\nopenstack/secrets/placement-keystone-user\nopenstack/secrets/pvc-ceph-client-key\nopenstack/secrets/rabbitmq-barbican-default-user\nopenstack/secrets/rabbitmq-barbican-erlang-cookie\nopenstack/secrets/rabbitmq-cinder-default-user\nopenstack/secrets/rabbitmq-cinder-erlang-cookie\nopenstack/secrets/rabbitmq-glance-default-user\nopenstack/secrets/rabbitmq-glance-erlang-cookie\nopenstack/secrets/rabbitmq-heat-default-user\nopenstack/secrets/rabbitmq-heat-erlang-cookie\nopenstack/secrets/rabbitmq-keystone-default-user\nopenstack/secrets/rabbitmq-keystone-erlang-cookie\nopenstack/secrets/rabbitmq-magnum-default-user\nopenstack/secrets/rabbitmq-magnum-erlang-cookie\nopenstack/secrets/rabbitmq-manila-default-user\nopenstack/secrets/rabbitmq-manila-erlang-cookie\nopenstack/secrets/rabbitmq-messaging-topology-operator-webhook\nopenstack/secrets/rabbitmq-neutron-default-user\nopenstack/secrets/rabbitmq-neutron-erlang-cookie\nopenstack/secrets/rabbitmq-nova-default-user\nopenstack/secrets/rabbitmq-nova-erlang-cookie\nopenstack/secrets/rabbitmq-octavia-default-user\nopenstack/secrets/rabbitmq-o
ctavia-erlang-cookie\nopenstack/secrets/rook-ceph-config\nopenstack/secrets/rook-ceph-crash-collector-keyring\nopenstack/secrets/rook-ceph-mon\nopenstack/secrets/rook-ceph-object-user-ceph-cosi\nopenstack/secrets/rook-ceph-rgw-ceph-a-keyring\nopenstack/secrets/rook-ceph-rgw-ceph-certs\nopenstack/secrets/rook-csi-cephfs-node\nopenstack/secrets/rook-csi-cephfs-provisioner\nopenstack/secrets/rook-csi-rbd-node\nopenstack/secrets/rook-csi-rbd-provisioner\nopenstack/secrets/sh.helm.release.v1.barbican.v1\nopenstack/secrets/sh.helm.release.v1.ceph-provisioners.v1\nopenstack/secrets/sh.helm.release.v1.ceph.v1\nopenstack/secrets/sh.helm.release.v1.cinder.v1\nopenstack/secrets/sh.helm.release.v1.glance.v1\nopenstack/secrets/sh.helm.release.v1.heat.v1\nopenstack/secrets/sh.helm.release.v1.horizon.v1\nopenstack/secrets/sh.helm.release.v1.keystone.v1\nopenstack/secrets/sh.helm.release.v1.libvirt.v1\nopenstack/secrets/sh.helm.release.v1.magnum.v1\nopenstack/secrets/sh.helm.release.v1.manila.v1\nopenstack/secrets/sh.helm.release.v1.memcached.v1\nopenstack/secrets/sh.helm.release.v1.neutron.v1\nopenstack/secrets/sh.helm.release.v1.nova.v1\nopenstack/secrets/sh.helm.release.v1.octavia.v1\nopenstack/secrets/sh.helm.release.v1.openvswitch.v1\nopenstack/secrets/sh.helm.release.v1.ovn.v1\nopenstack/secrets/sh.helm.release.v1.placement.v1\nopenstack/secrets/sh.helm.release.v1.pxc-operator.v1\nopenstack/secrets/sh.helm.release.v1.rabbitmq-cluster-operator.v1\nopenstack/secrets/sh.helm.release.v1.staffeln.v1\nopenstack/secrets/sh.helm.release.v1.tempest.v1\nopenstack/secrets/sh.helm.release.v1.valkey.v1\nopenstack/secrets/staffeln-db-admin\nopenstack/secrets/staffeln-db-user\nopenstack/secrets/staffeln-etc\nopenstack/secrets/tempest-etc\nopenstack/secrets/tempest-keystone-admin\nopenstack/secrets/tempest-keystone-user\nopenstack/secrets/valkey-ca\nopenstack/secrets/valkey-server-certs\nopenstack/serviceaccounts/rook-ceph-osd\nopenstack/serviceaccounts/rook-ceph-purge-osd\nopenstack/service
accounts/rook-ceph-rgw\nopenstack/serviceaccounts/staffeln-api\nopenstack/serviceaccounts/staffeln-conductor\nopenstack/serviceaccounts/staffeln-db-init\nopenstack/serviceaccounts/staffeln-db-sync\nopenstack/serviceaccounts/tempest-ks-user\nopenstack/serviceaccounts/tempest-run-tests\nopenstack/serviceaccounts/valkey\nopenstack/services/barbican-api\nopenstack/services/ceph-mon\nopenstack/services/cinder-api\nopenstack/services/glance-api\nopenstack/services/heat-api\nopenstack/services/heat-cfn\nopenstack/services/horizon\nopenstack/services/horizon-int\nopenstack/services/keystone-api\nopenstack/services/magnum-api\nopenstack/services/magnum-registry\nopenstack/services/manila-api\nopenstack/services/memcached\nopenstack/services/memcached-metrics\nopenstack/services/neutron-server\nopenstack/services/nova-api\nopenstack/services/nova-metadata\nopenstack/services/nova-novncproxy\nopenstack/services/octavia-api\nopenstack/services/openstack-exporter\nopenstack/services/ovn-ovsdb-nb\nopenstack/services/ovn-ovsdb-sb\nopenstack/services/percona-xtradb-cluster-operator\nopenstack/services/percona-xtradb-haproxy\nopenstack/services/percona-xtradb-haproxy-metrics\nopenstack/services/percona-xtradb-haproxy-replicas\nopenstack/services/percona-xtradb-pxc\nopenstack/services/percona-xtradb-pxc-unready\nopenstack/services/placement-api\nopenstack/services/rabbitmq-barbican\nopenstack/services/rabbitmq-barbican-nodes\nopenstack/services/rabbitmq-cinder\nopenstack/services/rabbitmq-cinder-nodes\nopenstack/services/rabbitmq-glance\nopenstack/services/rabbitmq-glance-nodes\nopenstack/services/rabbitmq-heat\nopenstack/services/rabbitmq-heat-nodes\nopenstack/services/rabbitmq-keystone\nopenstack/services/rabbitmq-keystone-nodes\nopenstack/services/rabbitmq-magnum\nopenstack/services/rabbitmq-magnum-nodes\nopenstack/services/rabbitmq-manila\nopenstack/services/rabbitmq-manila-nodes\nopenstack/services/rabbitmq-messaging-topology-operator-webhook\nopenstack/services/rabbitmq-neutron
\nopenstack/services/rabbitmq-neutron-nodes\nopenstack/services/rabbitmq-nova\nopenstack/services/rabbitmq-nova-nodes\nopenstack/services/rabbitmq-octavia\nopenstack/services/rabbitmq-octavia-nodes\nopenstack/services/rook-ceph-rgw-ceph\nopenstack/services/staffeln-api\nopenstack/services/valkey\nopenstack/services/valkey-headless\nopenstack/services/valkey-metrics\nopenstack/statefulsets/magnum-conductor\nopenstack/statefulsets/ovn-ovsdb-nb\nopenstack/statefulsets/ovn-ovsdb-sb\nopenstack/statefulsets/percona-xtradb-haproxy\nopenstack/statefulsets/percona-xtradb-pxc\nopenstack/statefulsets/rabbitmq-barbican-server\nopenstack/statefulsets/rabbitmq-cinder-server\nopenstack/statefulsets/rabbitmq-glance-server\nopenstack/statefulsets/rabbitmq-heat-server\nopenstack/statefulsets/rabbitmq-keystone-server\nopenstack/statefulsets/rabbitmq-magnum-server\nopenstack/statefulsets/rabbitmq-manila-server\nopenstack/statefulsets/rabbitmq-neutron-server\nopenstack/statefulsets/rabbitmq-nova-server\nopenstack/statefulsets/rabbitmq-octavia-server\nopenstack/statefulsets/valkey-node\norc-system/roles/orc-leader-election-role\norc-system/serviceaccounts/default\norc-system/rolebindings/orc-leader-election-rolebinding\norc-system/services/orc-controller-manager-metrics-service\nrook-ceph/configmaps/kube-root-ca.crt\nrook-ceph/configmaps/rook-ceph-csi-config\nrook-ceph/configmaps/rook-ceph-csi-mapping-config\nrook-ceph/configmaps/rook-ceph-operator-config\nsecretgen-controller/configmaps/kube-root-ca.crt\nrook-ceph/deployment/rook-ceph-operator\nsecretgen-controller/deployment/secretgen-controller\nrook-ceph/pods/rook-ceph-operator-7b66cfb94c-tj94j\nsecretgen-controller/pods/secretgen-controller-5cf976ccc7-szs5h\nrook-ceph/rolebindings/cephfs-csi-provisioner-role-cfg\nrook-ceph/rolebindings/rbd-csi-provisioner-role-cfg\nrook-ceph/rolebindings/rook-ceph-cluster-mgmt\nrook-ceph/rolebindings/rook-ceph-cmd-reporter\nrook-ceph/rolebindings/rook-ceph-mgr\nrook-ceph/rolebindings/rook-ceph-mgr-s
ystem\nrook-ceph/rolebindings/rook-ceph-mgr-system-openstack\nrook-ceph/rolebindings/rook-ceph-osd\nrook-ceph/rolebindings/rook-ceph-purge-osd\nrook-ceph/rolebindings/rook-ceph-system\nrook-ceph/roles/cephfs-external-provisioner-cfg\nrook-ceph/roles/rbd-external-provisioner-cfg\nrook-ceph/roles/rook-ceph-cmd-reporter\nrook-ceph/roles/rook-ceph-mgr\nrook-ceph/roles/rook-ceph-osd\nrook-ceph/roles/rook-ceph-purge-osd\nrook-ceph/roles/rook-ceph-system\nrook-ceph/secrets/sh.helm.release.v1.rook-ceph.v1\nsecretgen-controller/serviceaccounts/default\nsecretgen-controller/serviceaccounts/secretgen-controller-sa\nrook-ceph/serviceaccounts/default\nrook-ceph/serviceaccounts/objectstorage-provisioner\nrook-ceph/serviceaccounts/rook-ceph-cmd-reporter\nrook-ceph/serviceaccounts/rook-ceph-default\nrook-ceph/serviceaccounts/rook-ceph-mgr\nrook-ceph/serviceaccounts/rook-ceph-osd\nrook-ceph/serviceaccounts/rook-ceph-purge-osd\nrook-ceph/serviceaccounts/rook-ceph-rgw\nrook-ceph/serviceaccounts/rook-ceph-system\nrook-ceph/serviceaccounts/rook-csi-cephfs-plugin-sa\nrook-ceph/serviceaccounts/rook-csi-cephfs-provisioner-sa\nrook-ceph/serviceaccounts/rook-csi-rbd-plugin-sa\nrook-ceph/serviceaccounts/rook-csi-rbd-provisioner-sa\norc-system/serviceaccounts/orc-controller-manager",
                            "stdout_lines": [
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: warning: options --max-args and --replace/-I/-i are mutually exclusive, ignoring previous --max-args value",
                                "xargs: xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/configmaps/keycloak-env-vars",
                                "auth-system/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/deployment/capi-kubeadm-bootstrap-controller-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/endpoints/keycloak",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/endpoints/keycloak-headless",
                                "auth-system/endpoints/keycloak-metrics",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/endpoints/capi-kubeadm-bootstrap-webhook-service",
                                "auth-system/ingresses/keycloak",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "auth-system/networkpolicies/keycloak",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/pods/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/pods/keycloak-0",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/rolebindings/capi-kubeadm-bootstrap-leader-election-rolebinding",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/roles/capi-kubeadm-bootstrap-leader-election-role",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/secrets/capi-kubeadm-bootstrap-webhook-service-cert",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/secrets/keycloak",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/secrets/keycloak-externaldb",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/secrets/keycloak.199-204-45-156.nip.io-tls",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/secrets/sh.helm.release.v1.keycloak.v1",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/serviceaccounts/capi-kubeadm-bootstrap-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/serviceaccounts/keycloak",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-bootstrap-system/services/capi-kubeadm-bootstrap-webhook-service",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/services/keycloak",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/services/keycloak-headless",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "auth-system/services/keycloak-metrics",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "auth-system/statefulsets/keycloak",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/deployment/capi-kubeadm-control-plane-controller-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/deployment/capi-controller-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/endpoints/capi-kubeadm-control-plane-webhook-service",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/endpoints/capi-webhook-service",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/pods/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/pods/capi-controller-manager-bc4cf8c95-w8p6b",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/rolebindings/capi-kubeadm-control-plane-leader-election-rolebinding",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/roles/capi-kubeadm-control-plane-leader-election-role",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/secrets/capi-kubeadm-control-plane-webhook-service-cert",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/rolebindings/capi-leader-election-rolebinding",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/roles/capi-leader-election-role",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/serviceaccounts/capi-kubeadm-control-plane-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-kubeadm-control-plane-system/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/secrets/capi-webhook-service-cert",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/serviceaccounts/capi-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/serviceaccounts/default",
                                "capi-kubeadm-control-plane-system/services/capi-kubeadm-control-plane-webhook-service",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capi-system/services/capi-webhook-service",
                                "capo-system/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/configmaps/cert-manager-webhook",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/deployment/capo-controller-manager",
                                "cert-manager/deployment/cert-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/deployment/cert-manager-cainjector",
                                "cert-manager/deployment/cert-manager-webhook",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/endpoints/capo-webhook-service",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/endpoints/cert-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/endpoints/cert-manager-webhook",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/pods/capo-controller-manager-6975759b4b-tkxrs",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/pods/cert-manager-75c4c745bc-45s4r",
                                "cert-manager/pods/cert-manager-cainjector-64b59ddb75-tl5x7",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "cert-manager/pods/cert-manager-webhook-548949fc64-vkrlt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/rolebindings/capo-leader-election-rolebinding",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/roles/capo-leader-election-role",
                                "capo-system/secrets/capo-webhook-service-cert",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/rolebindings/cert-manager-cainjector:leaderelection",
                                "cert-manager/rolebindings/cert-manager-webhook:dynamic-serving",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/rolebindings/cert-manager:leaderelection",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/serviceaccounts/capo-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/roles/cert-manager-cainjector:leaderelection",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/roles/cert-manager-webhook:dynamic-serving",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/roles/cert-manager:leaderelection",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "capo-system/services/capo-webhook-service",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/secrets/cert-manager-selfsigned-ca",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/secrets/cert-manager-webhook-ca",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "cert-manager/secrets/kube-prometheus-stack-ca",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/secrets/sh.helm.release.v1.cert-manager.v1",
                                "cert-manager/services/cert-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/serviceaccounts/cert-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/services/cert-manager-webhook",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/serviceaccounts/cert-manager-cainjector",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/serviceaccounts/cert-manager-webhook",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "cert-manager/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/configmaps/ingress-nginx-controller",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/configmaps/ingress-nginx-tcp",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/configmaps/ingress-nginx-udp",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/endpoints/kubernetes",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/daemonsets/ingress-nginx-controller",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/deployment/ingress-nginx-defaultbackend",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/endpoints/ingress-nginx-controller",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/endpoints/ingress-nginx-controller-admission",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/endpoints/ingress-nginx-controller-metrics",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/endpoints/ingress-nginx-defaultbackend",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/pods/ingress-nginx-controller-j4bqv",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/pods/ingress-nginx-defaultbackend-6987ff55cf-gpx4l",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/rolebindings/ingress-nginx",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/roles/ingress-nginx",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/secrets/ingress-nginx-admission",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/secrets/sh.helm.release.v1.ingress-nginx.v1",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/serviceaccounts/ingress-nginx",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/serviceaccounts/ingress-nginx-backend",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/services/ingress-nginx-controller",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/services/ingress-nginx-controller-admission",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/services/ingress-nginx-controller-metrics",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "ingress-nginx/services/ingress-nginx-defaultbackend",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "default/services/kubernetes",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-node-lease/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/configmaps/cluster-info",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-node-lease/serviceaccounts/default",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo",
                                "kube-public/rolebindings/system:controller:bootstrap-signer",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/roles/kubeadm:bootstrap-signer-clusterinfo",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/roles/system:controller:bootstrap-signer",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/cilium-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/coredns",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/extension-apiserver-authentication",
                                "kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/kube-proxy",
                                "kube-system/configmaps/kube-root-ca.crt",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/kubeadm-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/configmaps/kubelet-config",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-public/serviceaccounts/default",
                                "kube-system/daemonsets/cilium",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/daemonsets/kube-proxy",
                                "kube-system/deployment/cilium-operator",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/deployment/coredns",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/endpoints/kube-dns",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/endpoints/kube-prometheus-stack-coredns",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/endpoints/kube-prometheus-stack-kube-controller-manager",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/endpoints/kube-prometheus-stack-kube-etcd",
                                "kube-system/endpoints/kube-prometheus-stack-kube-proxy",
                                "kube-system/endpoints/kube-prometheus-stack-kube-scheduler",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/endpoints/kube-prometheus-stack-kubelet",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "local-path-storage/configmaps/kube-root-ca.crt",
                                "local-path-storage/configmaps/local-path-config",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/pods/cilium-operator-869df985b8-kszk2",
                                "kube-system/pods/cilium-vdz4f",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/pods/coredns-67659f764b-6f2mm",
                                "kube-system/pods/coredns-67659f764b-j6fp4",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/pods/etcd-instance",
                                "kube-system/pods/kube-apiserver-instance",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "kube-system/pods/kube-controller-manager-instance",
                                "kube-system/pods/kube-proxy-sp2vs",
                                "kube-system/pods/kube-scheduler-instance",
                                "kube-system/pods/kube-vip-instance",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "local-path-storage/deployment/local-path-provisioner",
                                "kube-system/rolebindings/cilium-config-agent",
                                "kube-system/rolebindings/kube-proxy",
                                "kube-system/rolebindings/kubeadm:kubeadm-certs",
                                "kube-system/rolebindings/kubeadm:kubelet-config",
                                "kube-system/rolebindings/kubeadm:nodes-kubeadm-config",
                                "kube-system/rolebindings/system::extension-apiserver-authentication-reader",
                                "kube-system/rolebindings/system::leader-locking-kube-controller-manager",
                                "kube-system/rolebindings/system::leader-locking-kube-scheduler",
                                "kube-system/rolebindings/system:controller:bootstrap-signer",
                                "kube-system/rolebindings/system:controller:cloud-provider",
                                "kube-system/rolebindings/system:controller:token-cleaner",
                                "kube-system/roles/cilium-config-agent",
                                "kube-system/roles/extension-apiserver-authentication-reader",
                                "kube-system/roles/kube-proxy",
                                "kube-system/roles/kubeadm:kubeadm-certs",
                                "kube-system/roles/kubeadm:kubelet-config",
                                "kube-system/roles/kubeadm:nodes-kubeadm-config",
                                "kube-system/roles/system::leader-locking-kube-controller-manager",
                                "kube-system/roles/system::leader-locking-kube-scheduler",
                                "kube-system/roles/system:controller:bootstrap-signer",
                                "kube-system/roles/system:controller:cloud-provider",
                                "kube-system/roles/system:controller:token-cleaner",
                                "local-path-storage/pods/local-path-provisioner-679c578f5-7h8w5",
                                "kube-system/secrets/bootstrap-token-592olp",
                                "kube-system/secrets/bootstrap-token-vahv4y",
                                "kube-system/secrets/kubeadm-certs",
                                "kube-system/secrets/sh.helm.release.v1.cilium.v1",
                                "kube-system/serviceaccounts/attachdetach-controller",
                                "kube-system/serviceaccounts/bootstrap-signer",
                                "kube-system/serviceaccounts/certificate-controller",
                                "kube-system/serviceaccounts/cilium",
                                "kube-system/serviceaccounts/cilium-operator",
                                "kube-system/serviceaccounts/clusterrole-aggregation-controller",
                                "kube-system/serviceaccounts/coredns",
                                "kube-system/serviceaccounts/cronjob-controller",
                                "kube-system/serviceaccounts/daemon-set-controller",
                                "kube-system/serviceaccounts/default",
                                "kube-system/serviceaccounts/deployment-controller",
                                "kube-system/serviceaccounts/disruption-controller",
                                "kube-system/serviceaccounts/endpoint-controller",
                                "kube-system/serviceaccounts/endpointslice-controller",
                                "kube-system/serviceaccounts/endpointslicemirroring-controller",
                                "kube-system/serviceaccounts/ephemeral-volume-controller",
                                "kube-system/serviceaccounts/expand-controller",
                                "kube-system/serviceaccounts/generic-garbage-collector",
                                "kube-system/serviceaccounts/horizontal-pod-autoscaler",
                                "kube-system/serviceaccounts/job-controller",
                                "kube-system/serviceaccounts/kube-proxy",
                                "kube-system/serviceaccounts/namespace-controller",
                                "kube-system/serviceaccounts/node-controller",
                                "kube-system/serviceaccounts/persistent-volume-binder",
                                "kube-system/serviceaccounts/pod-garbage-collector",
                                "kube-system/serviceaccounts/pv-protection-controller",
                                "kube-system/serviceaccounts/pvc-protection-controller",
                                "kube-system/serviceaccounts/replicaset-controller",
                                "kube-system/serviceaccounts/replication-controller",
                                "kube-system/serviceaccounts/resourcequota-controller",
                                "kube-system/serviceaccounts/root-ca-cert-publisher",
                                "kube-system/serviceaccounts/service-account-controller",
                                "kube-system/serviceaccounts/service-controller",
                                "kube-system/serviceaccounts/statefulset-controller",
                                "kube-system/serviceaccounts/token-cleaner",
                                "kube-system/serviceaccounts/ttl-after-finished-controller",
                                "kube-system/serviceaccounts/ttl-controller",
                                "kube-system/services/kube-dns",
                                "kube-system/services/kube-prometheus-stack-coredns",
                                "kube-system/services/kube-prometheus-stack-kube-controller-manager",
                                "kube-system/services/kube-prometheus-stack-kube-etcd",
                                "kube-system/services/kube-prometheus-stack-kube-proxy",
                                "kube-system/services/kube-prometheus-stack-kube-scheduler",
                                "kube-system/services/kube-prometheus-stack-kubelet",
                                "magnum-system/configmaps/kube-root-ca.crt",
                                "local-path-storage/secrets/sh.helm.release.v1.local-path-provisioner.v1",
                                "local-path-storage/serviceaccounts/default",
                                "local-path-storage/serviceaccounts/local-path-provisioner",
                                "monitoring/configmaps/goldpinger-zap",
                                "monitoring/configmaps/ipmi-exporter",
                                "monitoring/configmaps/kube-prometheus-stack-alertmanager-overview",
                                "monitoring/configmaps/kube-prometheus-stack-apiserver",
                                "monitoring/configmaps/kube-prometheus-stack-cluster-total",
                                "monitoring/configmaps/kube-prometheus-stack-controller-manager",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster-advanced",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-goldpinger",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-haproxy",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-host-details",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-hosts-overview",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-node-exporter-full",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-osd-device-details",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-osds-overview",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-pool-detail",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-pool-overview",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-details",
                                "monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-overview",
                                "monitoring/configmaps/kube-prometheus-stack-etcd",
                                "monitoring/configmaps/kube-prometheus-stack-grafana",
                                "monitoring/configmaps/kube-prometheus-stack-grafana-config-dashboards",
                                "monitoring/configmaps/kube-prometheus-stack-grafana-datasource",
                                "monitoring/configmaps/kube-prometheus-stack-grafana-overview",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-coredns",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-resources-cluster",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-resources-multicluster",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-resources-namespace",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-resources-node",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-resources-pod",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-resources-workload",
                                "monitoring/configmaps/kube-prometheus-stack-k8s-resources-workloads-namespace",
                                "monitoring/configmaps/kube-prometheus-stack-kubelet",
                                "monitoring/configmaps/kube-prometheus-stack-namespace-by-pod",
                                "monitoring/configmaps/kube-prometheus-stack-namespace-by-workload",
                                "monitoring/configmaps/kube-prometheus-stack-node-cluster-rsrc-use",
                                "monitoring/configmaps/kube-prometheus-stack-node-exporter",
                                "monitoring/configmaps/kube-prometheus-stack-node-rsrc-use",
                                "monitoring/configmaps/kube-prometheus-stack-nodes",
                                "monitoring/configmaps/kube-prometheus-stack-nodes-darwin",
                                "monitoring/configmaps/kube-prometheus-stack-persistentvolumesusage",
                                "monitoring/configmaps/kube-prometheus-stack-pod-total",
                                "monitoring/configmaps/kube-prometheus-stack-prometheus",
                                "monitoring/daemonsets/goldpinger",
                                "monitoring/configmaps/kube-prometheus-stack-prometheus-tls",
                                "monitoring/daemonsets/ipmi-exporter",
                                "monitoring/configmaps/kube-prometheus-stack-proxy",
                                "monitoring/daemonsets/kube-prometheus-stack-prometheus-node-exporter",
                                "monitoring/configmaps/kube-prometheus-stack-scheduler",
                                "monitoring/daemonsets/node-feature-discovery-worker",
                                "monitoring/configmaps/kube-prometheus-stack-workload-total",
                                "monitoring/daemonsets/vector",
                                "monitoring/configmaps/kube-root-ca.crt",
                                "monitoring/configmaps/loki",
                                "monitoring/configmaps/loki-alerting-rules",
                                "monitoring/configmaps/loki-gateway",
                                "monitoring/configmaps/loki-runtime",
                                "monitoring/configmaps/node-feature-discovery-master-conf",
                                "monitoring/configmaps/node-feature-discovery-topology-updater-conf",
                                "monitoring/configmaps/node-feature-discovery-worker-conf",
                                "monitoring/configmaps/prometheus-kube-prometheus-stack-prometheus-rulefiles-0",
                                "monitoring/configmaps/vector",
                                "monitoring/deployment/kube-prometheus-stack-grafana",
                                "monitoring/deployment/kube-prometheus-stack-kube-state-metrics",
                                "monitoring/deployment/kube-prometheus-stack-operator",
                                "monitoring/deployment/loki-gateway",
                                "monitoring/deployment/node-feature-discovery-gc",
                                "monitoring/deployment/node-feature-discovery-master",
                                "monitoring/deployment/prometheus-pushgateway",
                                "monitoring/endpoints/alertmanager-operated",
                                "monitoring/endpoints/goldpinger",
                                "monitoring/endpoints/kube-prometheus-stack-alertmanager",
                                "monitoring/endpoints/kube-prometheus-stack-grafana",
                                "monitoring/endpoints/kube-prometheus-stack-kube-state-metrics",
                                "monitoring/endpoints/kube-prometheus-stack-operator",
                                "monitoring/endpoints/kube-prometheus-stack-prometheus",
                                "monitoring/endpoints/kube-prometheus-stack-prometheus-node-exporter",
                                "monitoring/endpoints/loki",
                                "monitoring/endpoints/loki-chunks-cache",
                                "monitoring/endpoints/loki-gateway",
                                "monitoring/endpoints/loki-headless",
                                "monitoring/endpoints/loki-memberlist",
                                "monitoring/endpoints/loki-results-cache",
                                "monitoring/endpoints/prometheus-operated",
                                "monitoring/endpoints/prometheus-pushgateway",
                                "monitoring/endpoints/vector-headless",
                                "monitoring/ingresses/kube-prometheus-stack-alertmanager",
                                "monitoring/ingresses/kube-prometheus-stack-grafana",
                                "monitoring/ingresses/kube-prometheus-stack-prometheus",
                                "magnum-system/serviceaccounts/default",
                                "monitoring/pods/alertmanager-kube-prometheus-stack-alertmanager-0",
                                "monitoring/pods/goldpinger-7jzp8",
                                "monitoring/pods/kube-prometheus-stack-grafana-668bfb9659-ft52b",
                                "monitoring/pods/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m",
                                "monitoring/pods/kube-prometheus-stack-operator-cd88cf4bf-lzh7g",
                                "monitoring/pods/kube-prometheus-stack-prometheus-node-exporter-59qlm",
                                "monitoring/pods/loki-0",
                                "monitoring/pods/loki-chunks-cache-0",
                                "monitoring/pods/loki-gateway-cf54cb88c-zv654",
                                "monitoring/pods/loki-results-cache-0",
                                "monitoring/pods/node-feature-discovery-gc-6675cbb6d9-zv9sn",
                                "monitoring/pods/node-feature-discovery-master-8665476dbc-t4z5z",
                                "monitoring/pods/node-feature-discovery-worker-p8lmk",
                                "monitoring/pods/prometheus-kube-prometheus-stack-prometheus-0",
                                "monitoring/pods/prometheus-pushgateway-7b8659c68b-28dht",
                                "monitoring/pods/vector-qzjms",
                                "monitoring/persistentvolumeclaims/alertmanager-kube-prometheus-stack-alertmanager-db-alertmanager-kube-prometheus-stack-alertmanager-0",
                                "monitoring/persistentvolumeclaims/prometheus-kube-prometheus-stack-prometheus-db-prometheus-kube-prometheus-stack-prometheus-0",
                                "monitoring/persistentvolumeclaims/storage-loki-0",
                                "monitoring/rolebindings/kube-prometheus-stack-grafana",
                                "monitoring/rolebindings/kube-prometheus-stack-pod-tls-sidecar",
                                "monitoring/rolebindings/node-feature-discovery-worker",
                                "monitoring/roles/kube-prometheus-stack-grafana",
                                "monitoring/roles/kube-prometheus-stack-pod-tls-sidecar",
                                "monitoring/roles/node-feature-discovery-worker",
                                "monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager",
                                "monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-generated",
                                "monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-tls-assets-0",
                                "monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-web-config",
                                "monitoring/secrets/alertmanager-tls",
                                "monitoring/secrets/grafana-tls",
                                "monitoring/secrets/kube-prometheus-stack-admission",
                                "monitoring/secrets/kube-prometheus-stack-alertmanager-client-secret",
                                "monitoring/secrets/kube-prometheus-stack-alertmanager-cookie-secret",
                                "monitoring/secrets/kube-prometheus-stack-alertmanager-oauth2-proxy",
                                "monitoring/secrets/kube-prometheus-stack-etcd-client-cert",
                                "monitoring/secrets/kube-prometheus-stack-grafana",
                                "monitoring/secrets/kube-prometheus-stack-grafana-client-secret",
                                "monitoring/secrets/kube-prometheus-stack-prometheus-client-secret",
                                "monitoring/secrets/kube-prometheus-stack-prometheus-cookie-secret",
                                "monitoring/secrets/kube-prometheus-stack-prometheus-node-exporter-59qlm-tls",
                                "monitoring/secrets/kube-prometheus-stack-prometheus-oauth2-proxy",
                                "monitoring/secrets/prometheus-kube-prometheus-stack-prometheus",
                                "monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-0-tls",
                                "monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-tls-assets-0",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/configmaps/barbican-bin",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-web-config",
                                "openstack/configmaps/ceph-csi-config",
                                "monitoring/secrets/prometheus-tls",
                                "monitoring/secrets/sh.helm.release.v1.goldpinger.v1",
                                "openstack/configmaps/ceph-etc",
                                "monitoring/secrets/sh.helm.release.v1.kube-prometheus-stack.v1",
                                "openstack/configmaps/cinder-bin",
                                "monitoring/secrets/sh.helm.release.v1.loki.v1",
                                "openstack/configmaps/glance-bin",
                                "monitoring/secrets/sh.helm.release.v1.node-feature-discovery.v1",
                                "openstack/configmaps/heat-bin",
                                "monitoring/secrets/sh.helm.release.v1.prometheus-pushgateway.v1",
                                "openstack/configmaps/horizon-bin",
                                "monitoring/secrets/sh.helm.release.v1.vector.v1",
                                "openstack/configmaps/keepalived-bin",
                                "openstack/configmaps/keystone-bin",
                                "openstack/configmaps/keystone-openid-metadata",
                                "openstack/configmaps/kube-root-ca.crt",
                                "openstack/configmaps/libvirt-bin",
                                "openstack/configmaps/libvirt-libvirt-default-bin",
                                "openstack/configmaps/magnum-bin",
                                "openstack/configmaps/magnum-cluster-api-proxy-config",
                                "openstack/configmaps/manila-bin",
                                "openstack/configmaps/memcached-memcached-bin",
                                "openstack/configmaps/neutron-bin",
                                "openstack/configmaps/nova-bin",
                                "openstack/configmaps/octavia-bin",
                                "openstack/configmaps/openvswitch-bin",
                                "openstack/configmaps/ovn-bin",
                                "openstack/configmaps/percona-xtradb-haproxy",
                                "openstack/configmaps/percona-xtradb-pxc",
                                "openstack/configmaps/placement-bin",
                                "openstack/configmaps/rabbitmq-barbican-plugins-conf",
                                "openstack/configmaps/rabbitmq-barbican-server-conf",
                                "openstack/configmaps/rabbitmq-cinder-plugins-conf",
                                "openstack/configmaps/rabbitmq-cinder-server-conf",
                                "openstack/configmaps/rabbitmq-glance-plugins-conf",
                                "openstack/configmaps/rabbitmq-glance-server-conf",
                                "openstack/configmaps/rabbitmq-heat-plugins-conf",
                                "openstack/configmaps/rabbitmq-heat-server-conf",
                                "openstack/configmaps/rabbitmq-keystone-plugins-conf",
                                "openstack/configmaps/rabbitmq-keystone-server-conf",
                                "openstack/configmaps/rabbitmq-magnum-plugins-conf",
                                "openstack/configmaps/rabbitmq-magnum-server-conf",
                                "openstack/configmaps/rabbitmq-manila-plugins-conf",
                                "openstack/configmaps/rabbitmq-manila-server-conf",
                                "openstack/configmaps/rabbitmq-neutron-plugins-conf",
                                "openstack/configmaps/rabbitmq-neutron-server-conf",
                                "openstack/configmaps/rabbitmq-nova-plugins-conf",
                                "openstack/configmaps/rabbitmq-nova-server-conf",
                                "openstack/configmaps/rabbitmq-octavia-plugins-conf",
                                "openstack/configmaps/rabbitmq-octavia-server-conf",
                                "openstack/configmaps/rook-ceph-mon-endpoints",
                                "openstack/configmaps/rook-ceph-pdbstatemap",
                                "openstack/configmaps/rook-ceph-rgw-ceph-mime-types",
                                "openstack/configmaps/rook-config-override",
                                "openstack/configmaps/staffeln-bin",
                                "openstack/configmaps/tempest-bin",
                                "monitoring/serviceaccounts/default",
                                "openstack/configmaps/valkey-configuration",
                                "monitoring/serviceaccounts/goldpinger",
                                "openstack/configmaps/valkey-health",
                                "openstack/configmaps/valkey-scripts",
                                "monitoring/serviceaccounts/kube-prometheus-stack-alertmanager",
                                "monitoring/serviceaccounts/kube-prometheus-stack-grafana",
                                "monitoring/serviceaccounts/kube-prometheus-stack-kube-state-metrics",
                                "monitoring/serviceaccounts/kube-prometheus-stack-operator",
                                "monitoring/serviceaccounts/kube-prometheus-stack-prometheus",
                                "monitoring/serviceaccounts/kube-prometheus-stack-prometheus-node-exporter",
                                "monitoring/serviceaccounts/loki",
                                "monitoring/serviceaccounts/node-feature-discovery",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "monitoring/serviceaccounts/node-feature-discovery-gc",
                                "monitoring/serviceaccounts/node-feature-discovery-worker",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "monitoring/serviceaccounts/prometheus-pushgateway",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "monitoring/serviceaccounts/vector",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/cronjobs/cinder-volume-usage-audit",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/cronjobs/heat-engine-cleaner",
                                "openstack/cronjobs/heat-purge-deleted",
                                "openstack/cronjobs/keystone-credential-rotate",
                                "openstack/cronjobs/keystone-fernet-rotate",
                                "openstack/cronjobs/nova-cell-setup",
                                "openstack/cronjobs/nova-service-cleaner",
                                "monitoring/services/alertmanager-operated",
                                "monitoring/services/goldpinger",
                                "monitoring/services/kube-prometheus-stack-alertmanager",
                                "monitoring/services/kube-prometheus-stack-grafana",
                                "monitoring/services/kube-prometheus-stack-kube-state-metrics",
                                "monitoring/services/kube-prometheus-stack-operator",
                                "monitoring/services/kube-prometheus-stack-prometheus",
                                "monitoring/services/kube-prometheus-stack-prometheus-node-exporter",
                                "monitoring/services/loki",
                                "monitoring/services/loki-chunks-cache",
                                "monitoring/services/loki-gateway",
                                "monitoring/services/loki-headless",
                                "monitoring/services/loki-memberlist",
                                "monitoring/services/loki-results-cache",
                                "monitoring/services/prometheus-operated",
                                "monitoring/services/prometheus-pushgateway",
                                "monitoring/services/vector-headless",
                                "monitoring/statefulsets/alertmanager-kube-prometheus-stack-alertmanager",
                                "monitoring/statefulsets/loki",
                                "monitoring/statefulsets/loki-chunks-cache",
                                "monitoring/statefulsets/loki-results-cache",
                                "monitoring/statefulsets/prometheus-kube-prometheus-stack-prometheus",
                                "openstack/daemonsets/keepalived",
                                "openstack/daemonsets/libvirt-libvirt-default",
                                "openstack/daemonsets/magnum-cluster-api-proxy",
                                "openstack/daemonsets/neutron-netns-cleanup-cron-default",
                                "openstack/daemonsets/neutron-ovn-metadata-agent-default",
                                "openstack/daemonsets/nova-compute-default",
                                "openstack/daemonsets/octavia-health-manager-default",
                                "openstack/daemonsets/openvswitch",
                                "openstack/daemonsets/ovn-controller",
                                "openstack/deployment/barbican-api",
                                "openstack/deployment/cinder-api",
                                "openstack/deployment/cinder-backup",
                                "openstack/deployment/cinder-scheduler",
                                "orc-system/configmaps/kube-root-ca.crt",
                                "openstack/deployment/cinder-volume",
                                "openstack/deployment/glance-api",
                                "openstack/deployment/heat-api",
                                "openstack/deployment/heat-cfn",
                                "openstack/deployment/heat-engine",
                                "openstack/deployment/horizon",
                                "openstack/deployment/keystone-api",
                                "openstack/deployment/magnum-api",
                                "openstack/deployment/magnum-registry",
                                "openstack/deployment/manila-api",
                                "openstack/deployment/manila-data",
                                "openstack/deployment/manila-scheduler",
                                "openstack/deployment/manila-share",
                                "openstack/deployment/memcached-memcached",
                                "openstack/deployment/neutron-server",
                                "openstack/deployment/nova-api-metadata",
                                "openstack/deployment/nova-api-osapi",
                                "openstack/deployment/nova-conductor",
                                "openstack/deployment/nova-novncproxy",
                                "openstack/deployment/nova-scheduler",
                                "openstack/deployment/octavia-api",
                                "openstack/deployment/octavia-housekeeping",
                                "openstack/deployment/octavia-worker",
                                "openstack/deployment/openstack-database-exporter",
                                "openstack/deployment/openstack-exporter",
                                "openstack/deployment/ovn-northd",
                                "openstack/deployment/placement-api",
                                "openstack/deployment/pxc-operator",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/deployment/rabbitmq-cluster-operator",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/deployment/rabbitmq-messaging-topology-operator",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/deployment/rook-ceph-crashcollector-instance",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/deployment/rook-ceph-rgw-ceph-a",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/deployment/staffeln-api",
                                "openstack/deployment/staffeln-conductor",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/barbican-api",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/ceph-mon",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/cinder-api",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/glance-api",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/heat-api",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/heat-cfn",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/horizon",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/horizon-int",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/keystone-api",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/magnum-api",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/magnum-registry",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/manila-api",
                                "error: the server doesn't have a resource type \"podsecuritypolicies\"",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/memcached",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/memcached-metrics",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/neutron-server",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/nova-api",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/nova-metadata",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/nova-novncproxy",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/octavia-api",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/openstack-exporter",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/ovn-ovsdb-nb",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/ovn-ovsdb-sb",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/percona-xtradb-cluster-operator",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/percona-xtradb-haproxy",
                                "xargs: warning: options --max-lines and --replace/-I/-i are mutually exclusive, ignoring previous --max-lines value",
                                "openstack/endpoints/percona-xtradb-haproxy-metrics",
                                "openstack/endpoints/percona-xtradb-haproxy-replicas",
                                "openstack/endpoints/percona-xtradb-pxc",
                                "openstack/endpoints/percona-xtradb-pxc-unready",
                                "openstack/endpoints/placement-api",
                                "openstack/endpoints/rabbitmq-barbican",
                                "openstack/endpoints/rabbitmq-barbican-nodes",
                                "openstack/endpoints/rabbitmq-cinder",
                                "openstack/endpoints/rabbitmq-cinder-nodes",
                                "openstack/endpoints/rabbitmq-glance",
                                "openstack/endpoints/rabbitmq-glance-nodes",
                                "openstack/endpoints/rabbitmq-heat",
                                "openstack/endpoints/rabbitmq-heat-nodes",
                                "openstack/endpoints/rabbitmq-keystone",
                                "openstack/endpoints/rabbitmq-keystone-nodes",
                                "openstack/endpoints/rabbitmq-magnum",
                                "openstack/ingresses/cloudformation",
                                "openstack/endpoints/rabbitmq-magnum-nodes",
                                "openstack/ingresses/compute",
                                "openstack/endpoints/rabbitmq-manila",
                                "openstack/ingresses/compute-novnc-proxy",
                                "openstack/endpoints/rabbitmq-manila-nodes",
                                "openstack/ingresses/container-infra",
                                "openstack/ingresses/container-infra-registry",
                                "openstack/endpoints/rabbitmq-messaging-topology-operator-webhook",
                                "openstack/ingresses/dashboard",
                                "openstack/endpoints/rabbitmq-neutron",
                                "openstack/ingresses/identity",
                                "openstack/endpoints/rabbitmq-neutron-nodes",
                                "openstack/ingresses/image",
                                "openstack/endpoints/rabbitmq-nova",
                                "openstack/ingresses/key-manager",
                                "openstack/endpoints/rabbitmq-nova-nodes",
                                "openstack/endpoints/rabbitmq-octavia",
                                "openstack/ingresses/load-balancer",
                                "openstack/endpoints/rabbitmq-octavia-nodes",
                                "openstack/ingresses/network",
                                "openstack/ingresses/orchestration",
                                "openstack/endpoints/rook-ceph-rgw-ceph",
                                "openstack/endpoints/staffeln-api",
                                "openstack/ingresses/placement",
                                "openstack/endpoints/valkey",
                                "openstack/ingresses/rook-ceph-cluster",
                                "openstack/ingresses/sharev2",
                                "openstack/endpoints/valkey-headless",
                                "openstack/ingresses/volumev3",
                                "openstack/endpoints/valkey-metrics",
                                "orc-system/endpoints/orc-controller-manager-metrics-service",
                                "orc-system/deployment/orc-controller-manager",
                                "openstack/networkpolicies/rabbitmq-cluster-operator",
                                "openstack/networkpolicies/rabbitmq-messaging-topology-operator",
                                "openstack/networkpolicies/valkey",
                                "openstack/jobs/barbican-db-init",
                                "openstack/jobs/barbican-db-sync",
                                "openstack/jobs/barbican-ks-endpoints",
                                "openstack/jobs/barbican-ks-service",
                                "openstack/jobs/barbican-ks-user",
                                "openstack/jobs/barbican-rabbit-init",
                                "openstack/jobs/cinder-backup-storage-init",
                                "openstack/jobs/cinder-bootstrap",
                                "openstack/jobs/cinder-create-internal-tenant",
                                "openstack/jobs/cinder-db-init",
                                "openstack/jobs/cinder-db-sync",
                                "openstack/jobs/cinder-ks-endpoints",
                                "openstack/jobs/cinder-ks-service",
                                "openstack/jobs/cinder-ks-user",
                                "openstack/jobs/cinder-rabbit-init",
                                "openstack/jobs/cinder-storage-init",
                                "openstack/jobs/cinder-volume-usage-audit-29540045",
                                "openstack/jobs/glance-db-init",
                                "openstack/jobs/glance-db-sync",
                                "openstack/jobs/glance-ks-endpoints",
                                "openstack/jobs/glance-ks-service",
                                "openstack/jobs/glance-ks-user",
                                "openstack/jobs/glance-metadefs-load",
                                "openstack/jobs/glance-rabbit-init",
                                "openstack/jobs/glance-storage-init",
                                "openstack/jobs/heat-bootstrap",
                                "openstack/jobs/heat-db-init",
                                "openstack/jobs/heat-db-sync",
                                "openstack/jobs/heat-domain-ks-user",
                                "openstack/jobs/heat-engine-cleaner-29540055",
                                "openstack/jobs/heat-engine-cleaner-29540060",
                                "openstack/jobs/heat-engine-cleaner-29540065",
                                "openstack/jobs/heat-ks-endpoints",
                                "openstack/jobs/heat-ks-service",
                                "openstack/jobs/heat-ks-user",
                                "openstack/jobs/heat-rabbit-init",
                                "openstack/jobs/heat-trusts",
                                "openstack/jobs/horizon-db-init",
                                "openstack/jobs/horizon-db-sync",
                                "openstack/jobs/keystone-bootstrap",
                                "openstack/jobs/keystone-credential-setup",
                                "openstack/jobs/keystone-db-init",
                                "openstack/jobs/keystone-db-sync",
                                "openstack/jobs/keystone-domain-manage",
                                "openstack/jobs/keystone-fernet-setup",
                                "openstack/jobs/keystone-rabbit-init",
                                "openstack/jobs/magnum-db-init",
                                "openstack/jobs/magnum-db-sync",
                                "openstack/jobs/magnum-domain-ks-user",
                                "openstack/jobs/magnum-ks-endpoints",
                                "openstack/jobs/magnum-ks-service",
                                "openstack/jobs/magnum-ks-user",
                                "openstack/jobs/magnum-rabbit-init",
                                "openstack/jobs/manila-bootstrap",
                                "openstack/jobs/manila-db-init",
                                "openstack/jobs/manila-db-sync",
                                "openstack/jobs/manila-ks-endpoints",
                                "openstack/jobs/manila-ks-service",
                                "openstack/jobs/manila-ks-user",
                                "openstack/jobs/manila-rabbit-init",
                                "openstack/jobs/neutron-db-init",
                                "openstack/jobs/neutron-db-sync",
                                "openstack/jobs/neutron-ks-endpoints",
                                "openstack/jobs/neutron-ks-service",
                                "openstack/jobs/neutron-ks-user",
                                "openstack/jobs/neutron-rabbit-init",
                                "openstack/jobs/nova-cell-setup-29540040",
                                "openstack/jobs/nova-db-init",
                                "openstack/jobs/nova-db-sync",
                                "openstack/jobs/nova-ks-endpoints",
                                "openstack/jobs/nova-ks-service",
                                "openstack/jobs/nova-ks-user",
                                "openstack/jobs/nova-rabbit-init",
                                "openstack/jobs/nova-service-cleaner-29540040",
                                "openstack/jobs/octavia-bootstrap",
                                "openstack/jobs/octavia-db-init",
                                "openstack/jobs/octavia-db-sync",
                                "openstack/jobs/octavia-ks-endpoints",
                                "openstack/jobs/octavia-ks-service",
                                "openstack/jobs/octavia-ks-user",
                                "openstack/jobs/octavia-rabbit-init",
                                "openstack/jobs/placement-db-init",
                                "openstack/jobs/placement-db-sync",
                                "openstack/jobs/placement-ks-endpoints",
                                "openstack/jobs/placement-ks-service",
                                "openstack/jobs/placement-ks-user",
                                "openstack/jobs/staffeln-db-init",
                                "openstack/jobs/staffeln-db-sync",
                                "openstack/jobs/tempest-ks-user",
                                "openstack/jobs/tempest-run-tests",
                                "openstack/pods/barbican-api-775987496d-z6jqv",
                                "openstack/pods/barbican-db-init-nm8k6",
                                "openstack/pods/barbican-db-sync-452x5",
                                "openstack/pods/barbican-ks-endpoints-w2ffg",
                                "openstack/pods/barbican-ks-service-8pm7j",
                                "openstack/pods/barbican-ks-user-fszfr",
                                "openstack/pods/barbican-rabbit-init-j5qmd",
                                "openstack/pods/cinder-api-86d7694f66-j97gj",
                                "openstack/pods/cinder-backup-dcfd7dfb7-sdwkc",
                                "openstack/pods/cinder-backup-storage-init-zmnkh",
                                "openstack/pods/cinder-bootstrap-wng86",
                                "openstack/pods/cinder-create-internal-tenant-6vgll",
                                "openstack/pods/cinder-db-init-mzm5b",
                                "openstack/pods/cinder-db-sync-mz6ls",
                                "openstack/pods/cinder-ks-endpoints-xv2tb",
                                "openstack/pods/cinder-ks-service-dlcxz",
                                "openstack/pods/cinder-ks-user-5bd5g",
                                "openstack/pods/cinder-rabbit-init-l4fpm",
                                "openstack/pods/cinder-scheduler-586f444995-p7grf",
                                "openstack/pods/cinder-storage-init-vt6br",
                                "openstack/pods/cinder-volume-66dc847979-qgp4l",
                                "openstack/pods/cinder-volume-usage-audit-29540045-jbmvh",
                                "openstack/pods/glance-api-65d579bfc8-6x76l",
                                "openstack/pods/glance-db-init-wbpff",
                                "openstack/pods/glance-db-sync-gk84f",
                                "openstack/pods/glance-ks-endpoints-dq2cc",
                                "openstack/pods/glance-ks-service-5h6bw",
                                "openstack/pods/glance-ks-user-lcfxr",
                                "openstack/pods/glance-metadefs-load-476tp",
                                "openstack/pods/glance-rabbit-init-c6rjt",
                                "openstack/pods/glance-storage-init-hdcpc",
                                "openstack/pods/heat-api-6d65f9477-kmbkl",
                                "openstack/pods/heat-bootstrap-9dwg2",
                                "openstack/pods/heat-cfn-f44db7787-t8f7m",
                                "openstack/pods/heat-db-init-fk8qw",
                                "openstack/pods/heat-db-sync-cxmcb",
                                "openstack/pods/heat-domain-ks-user-tq2c5",
                                "openstack/pods/heat-engine-64f8b77bfb-wngkr",
                                "openstack/pods/heat-engine-cleaner-29540055-p9pq9",
                                "openstack/pods/heat-engine-cleaner-29540060-z4g95",
                                "openstack/pods/heat-engine-cleaner-29540065-rcjr2",
                                "openstack/pods/heat-ks-endpoints-wwzbz",
                                "openstack/pods/heat-ks-service-8pxqz",
                                "openstack/pods/heat-ks-user-tfk98",
                                "openstack/pods/heat-rabbit-init-rbl9n",
                                "openstack/persistentvolumeclaims/data-ovn-ovsdb-nb-0",
                                "openstack/pods/heat-trusts-czrrv",
                                "openstack/persistentvolumeclaims/data-ovn-ovsdb-sb-0",
                                "openstack/pods/horizon-8cdd7b888-bvzvx",
                                "openstack/persistentvolumeclaims/datadir-percona-xtradb-pxc-0",
                                "openstack/pods/horizon-db-init-s5pbw",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-barbican-server-0",
                                "openstack/pods/horizon-db-sync-bgr2g",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-cinder-server-0",
                                "openstack/pods/keepalived-7jdfz",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-glance-server-0",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-heat-server-0",
                                "openstack/pods/keystone-api-c4656754c-mqbxm",
                                "openstack/pods/keystone-bootstrap-mdtrx",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-keystone-server-0",
                                "openstack/pods/keystone-credential-setup-6xsvx",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-magnum-server-0",
                                "openstack/pods/keystone-db-init-z5mwz",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-manila-server-0",
                                "openstack/pods/keystone-db-sync-zsq8z",
                                "openstack/pods/keystone-domain-manage-v865d",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-neutron-server-0",
                                "openstack/pods/keystone-fernet-setup-5rfqs",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-nova-server-0",
                                "openstack/pods/keystone-rabbit-init-m44qz",
                                "openstack/pods/libvirt-libvirt-default-6bgrg",
                                "openstack/persistentvolumeclaims/persistence-rabbitmq-octavia-server-0",
                                "openstack/pods/magnum-api-8549df7884-9b2zc",
                                "openstack/persistentvolumeclaims/valkey-data-valkey-node-0",
                                "openstack/pods/magnum-cluster-api-proxy-z2flh",
                                "openstack/pods/magnum-conductor-0",
                                "openstack/pods/magnum-db-init-dshrc",
                                "openstack/pods/magnum-db-sync-8ttpk",
                                "openstack/pods/magnum-domain-ks-user-vp8f2",
                                "openstack/pods/magnum-ks-endpoints-jvzvf",
                                "openstack/pods/magnum-ks-service-vdn67",
                                "openstack/pods/magnum-ks-user-4wvtj",
                                "openstack/pods/magnum-rabbit-init-w7jc7",
                                "openstack/pods/magnum-registry-c45778976-2zz96",
                                "openstack/pods/manila-api-5cdf958bd9-hmbmb",
                                "openstack/pods/manila-bootstrap-5wn97",
                                "openstack/pods/manila-data-75cbc955bd-27jjw",
                                "openstack/pods/manila-db-init-pbdm8",
                                "openstack/pods/manila-db-sync-rm9mz",
                                "openstack/pods/manila-ks-endpoints-d8nr9",
                                "openstack/pods/manila-ks-service-g7svt",
                                "openstack/pods/manila-ks-user-pr9mg",
                                "openstack/pods/manila-rabbit-init-74vjs",
                                "openstack/pods/manila-scheduler-5b584c8656-mmnnd",
                                "openstack/pods/manila-share-68879775b-rc6q9",
                                "openstack/pods/memcached-memcached-6479589586-9sxjx",
                                "openstack/pods/neutron-db-init-l7c9v",
                                "openstack/pods/neutron-db-sync-brwb5",
                                "openstack/pods/neutron-ks-endpoints-dstkg",
                                "openstack/pods/neutron-ks-service-sq4tp",
                                "openstack/pods/neutron-ks-user-kcfc4",
                                "openstack/pods/neutron-netns-cleanup-cron-default-8frwf",
                                "openstack/pods/neutron-ovn-metadata-agent-default-flhb5",
                                "openstack/pods/neutron-rabbit-init-rdnbf",
                                "openstack/pods/neutron-server-649c5974f6-5dkvl",
                                "openstack/pods/nova-api-metadata-546d94ddd7-btnrc",
                                "openstack/pods/nova-api-osapi-99c7b7cd8-2lnzr",
                                "openstack/pods/nova-bootstrap-trzqq",
                                "openstack/pods/nova-cell-setup-29540040-rtzd7",
                                "openstack/pods/nova-cell-setup-j97qh",
                                "openstack/pods/nova-compute-default-2v5pd",
                                "openstack/pods/nova-conductor-5474cb4b8d-bxzhq",
                                "openstack/pods/nova-db-init-b4sqh",
                                "openstack/pods/nova-db-sync-2rbjc",
                                "openstack/pods/nova-ks-endpoints-zwcm6",
                                "openstack/pods/nova-ks-service-fmj77",
                                "openstack/pods/nova-ks-user-t8xgz",
                                "openstack/pods/nova-novncproxy-85dd5b5965-z6hmj",
                                "openstack/pods/nova-rabbit-init-szpvx",
                                "openstack/pods/nova-scheduler-78775555d4-hb2j9",
                                "openstack/pods/nova-service-cleaner-29540040-cxdd4",
                                "openstack/pods/octavia-api-75db6578cf-m656r",
                                "openstack/pods/octavia-bootstrap-kwfv2",
                                "openstack/pods/octavia-db-init-wnz5h",
                                "openstack/pods/octavia-db-sync-rjq45",
                                "openstack/pods/octavia-health-manager-default-twmks",
                                "openstack/pods/octavia-housekeeping-87b98c47b-vqwct",
                                "openstack/pods/octavia-ks-endpoints-jdlzw",
                                "openstack/pods/octavia-ks-service-rkdp9",
                                "openstack/pods/octavia-ks-user-tjl52",
                                "openstack/pods/octavia-rabbit-init-vdqxf",
                                "openstack/pods/octavia-worker-774cddbcdc-qxl6k",
                                "openstack/pods/openstack-database-exporter-7c944bc9f-w2bdb",
                                "openstack/pods/openstack-exporter-74676fb4b4-jrkwh",
                                "openstack/pods/openvswitch-gj98d",
                                "openstack/pods/ovn-controller-6mbd4",
                                "openstack/pods/ovn-northd-6c6687ddd6-7grhs",
                                "openstack/pods/ovn-ovsdb-nb-0",
                                "openstack/pods/ovn-ovsdb-sb-0",
                                "openstack/pods/percona-xtradb-haproxy-0",
                                "openstack/pods/percona-xtradb-pxc-0",
                                "openstack/pods/placement-api-75695696c6-brsxj",
                                "openstack/pods/placement-db-init-89t92",
                                "openstack/pods/placement-db-sync-nvqjv",
                                "openstack/pods/placement-ks-endpoints-jmfl7",
                                "openstack/pods/placement-ks-service-qdjdz",
                                "openstack/pods/placement-ks-user-blkn9",
                                "openstack/pods/pxc-operator-7cff949c8b-7zp4j",
                                "openstack/pods/rabbitmq-barbican-server-0",
                                "openstack/rolebindings/barbican-barbican-api",
                                "openstack/pods/rabbitmq-cinder-server-0",
                                "openstack/rolebindings/barbican-barbican-db-init",
                                "openstack/pods/rabbitmq-cluster-operator-5448d56d95-vk9km",
                                "openstack/rolebindings/barbican-barbican-db-sync",
                                "openstack/pods/rabbitmq-glance-server-0",
                                "openstack/rolebindings/barbican-barbican-ks-endpoints",
                                "openstack/rolebindings/barbican-barbican-ks-service",
                                "openstack/pods/rabbitmq-heat-server-0",
                                "openstack/rolebindings/barbican-barbican-ks-user",
                                "openstack/pods/rabbitmq-keystone-server-0",
                                "openstack/rolebindings/barbican-barbican-rabbit-init",
                                "openstack/pods/rabbitmq-magnum-server-0",
                                "openstack/rolebindings/cinder-backup-storage-init",
                                "openstack/pods/rabbitmq-manila-server-0",
                                "openstack/pods/rabbitmq-messaging-topology-operator-7f8596f788-84l9x",
                                "openstack/rolebindings/cinder-cinder-api",
                                "openstack/pods/rabbitmq-neutron-server-0",
                                "openstack/rolebindings/cinder-cinder-backup",
                                "openstack/pods/rabbitmq-nova-server-0",
                                "openstack/pods/rabbitmq-octavia-server-0",
                                "openstack/rolebindings/cinder-cinder-bootstrap",
                                "openstack/pods/rook-ceph-crashcollector-instance-754c646bfd-htxl9",
                                "openstack/rolebindings/cinder-cinder-create-internal-tenant",
                                "openstack/pods/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw",
                                "openstack/rolebindings/cinder-cinder-db-init",
                                "openstack/pods/staffeln-api-6669c8779f-qgp4c",
                                "openstack/rolebindings/cinder-cinder-db-sync",
                                "openstack/pods/staffeln-conductor-7b5d99bcd4-ws4sl",
                                "openstack/rolebindings/cinder-cinder-ks-endpoints",
                                "openstack/pods/staffeln-db-init-p4pq4",
                                "openstack/rolebindings/cinder-cinder-ks-service",
                                "openstack/pods/staffeln-db-sync-khzx8",
                                "openstack/pods/tempest-ks-user-kwbf6",
                                "openstack/rolebindings/cinder-cinder-ks-user",
                                "openstack/pods/tempest-run-tests-g5plh",
                                "openstack/rolebindings/cinder-cinder-rabbit-init",
                                "openstack/rolebindings/cinder-cinder-scheduler",
                                "openstack/pods/valkey-node-0",
                                "openstack/rolebindings/cinder-cinder-test",
                                "openstack/rolebindings/cinder-cinder-volume",
                                "openstack/rolebindings/cinder-cinder-volume-usage-audit",
                                "openstack/rolebindings/cinder-storage-init",
                                "openstack/rolebindings/glance-glance-api",
                                "openstack/rolebindings/glance-glance-db-init",
                                "openstack/rolebindings/glance-glance-db-sync",
                                "openstack/rolebindings/glance-glance-ks-endpoints",
                                "openstack/rolebindings/glance-glance-ks-service",
                                "openstack/rolebindings/glance-glance-ks-user",
                                "openstack/rolebindings/glance-glance-metadefs-load",
                                "openstack/rolebindings/glance-glance-rabbit-init",
                                "openstack/rolebindings/glance-glance-storage-init",
                                "openstack/rolebindings/glance-glance-test",
                                "openstack/rolebindings/glance-storage-init",
                                "openstack/rolebindings/heat-heat-api",
                                "openstack/rolebindings/heat-heat-bootstrap",
                                "openstack/rolebindings/heat-heat-cfn",
                                "openstack/rolebindings/heat-heat-db-init",
                                "openstack/rolebindings/heat-heat-db-sync",
                                "openstack/rolebindings/heat-heat-engine",
                                "openstack/rolebindings/heat-heat-engine-cleaner",
                                "openstack/rolebindings/heat-heat-ks-endpoints",
                                "openstack/rolebindings/heat-heat-ks-service",
                                "openstack/rolebindings/heat-heat-ks-user",
                                "openstack/rolebindings/heat-heat-ks-user-domain",
                                "openstack/rolebindings/heat-heat-purge-deleted",
                                "openstack/rolebindings/heat-heat-rabbit-init",
                                "openstack/rolebindings/heat-heat-test",
                                "openstack/rolebindings/heat-heat-trusts",
                                "openstack/rolebindings/horizon-horizon",
                                "openstack/rolebindings/horizon-horizon-db-init",
                                "openstack/rolebindings/horizon-horizon-db-sync",
                                "openstack/rolebindings/horizon-horizon-test",
                                "openstack/rolebindings/keepalived",
                                "openstack/rolebindings/keystone-credential-rotate",
                                "openstack/rolebindings/keystone-credential-setup",
                                "openstack/rolebindings/keystone-fernet-rotate",
                                "openstack/rolebindings/keystone-fernet-setup",
                                "openstack/rolebindings/keystone-keystone-api",
                                "openstack/rolebindings/keystone-keystone-bootstrap",
                                "openstack/rolebindings/keystone-keystone-credential-rotate",
                                "openstack/rolebindings/keystone-keystone-db-init",
                                "openstack/rolebindings/keystone-keystone-db-sync",
                                "openstack/rolebindings/keystone-keystone-domain-manage",
                                "openstack/rolebindings/keystone-keystone-fernet-rotate",
                                "openstack/rolebindings/keystone-keystone-test",
                                "openstack/rolebindings/libvirt-cert-manager",
                                "openstack/rolebindings/libvirt-libvirt",
                                "openstack/rolebindings/magnum-magnum-api",
                                "openstack/rolebindings/magnum-magnum-conductor",
                                "openstack/rolebindings/magnum-magnum-db-init",
                                "openstack/rolebindings/magnum-magnum-db-sync",
                                "openstack/rolebindings/magnum-magnum-ks-endpoints",
                                "openstack/rolebindings/magnum-magnum-ks-service",
                                "openstack/rolebindings/magnum-magnum-ks-user",
                                "openstack/rolebindings/magnum-magnum-ks-user-domain",
                                "openstack/rolebindings/magnum-magnum-rabbit-init",
                                "openstack/rolebindings/manila-manila-api",
                                "openstack/rolebindings/manila-manila-data",
                                "openstack/rolebindings/manila-manila-db-init",
                                "openstack/rolebindings/manila-manila-db-sync",
                                "openstack/rolebindings/manila-manila-ks-endpoints",
                                "openstack/rolebindings/manila-manila-ks-service",
                                "openstack/rolebindings/manila-manila-ks-user",
                                "openstack/rolebindings/manila-manila-rabbit-init",
                                "openstack/rolebindings/manila-manila-scheduler",
                                "openstack/rolebindings/manila-manila-share",
                                "openstack/rolebindings/neutron-neutron-db-init",
                                "openstack/rolebindings/neutron-neutron-db-sync",
                                "openstack/rolebindings/neutron-neutron-ks-endpoints",
                                "openstack/rolebindings/neutron-neutron-ks-service",
                                "openstack/rolebindings/neutron-neutron-ks-user",
                                "openstack/rolebindings/neutron-neutron-ovn-metadata-agent",
                                "openstack/rolebindings/neutron-neutron-rabbit-init",
                                "openstack/rolebindings/neutron-neutron-server",
                                "openstack/rolebindings/neutron-neutron-test",
                                "openstack/rolebindings/nova-nova-api-metadata",
                                "openstack/rolebindings/nova-nova-api-osapi",
                                "openstack/rolebindings/nova-nova-bootstrap",
                                "orc-system/pods/orc-controller-manager-6cb597b5d4-glhcz",
                                "openstack/rolebindings/nova-nova-cell-setup",
                                "openstack/rolebindings/nova-nova-cell-setup-cron",
                                "openstack/rolebindings/nova-nova-compute",
                                "openstack/rolebindings/nova-nova-conductor",
                                "openstack/rolebindings/nova-nova-db-init",
                                "openstack/rolebindings/nova-nova-db-sync",
                                "openstack/rolebindings/nova-nova-ks-endpoints",
                                "openstack/rolebindings/nova-nova-ks-service",
                                "openstack/rolebindings/nova-nova-ks-user",
                                "openstack/rolebindings/nova-nova-novncproxy",
                                "openstack/rolebindings/nova-nova-rabbit-init",
                                "openstack/rolebindings/nova-nova-scheduler",
                                "openstack/rolebindings/nova-nova-service-cleaner",
                                "openstack/rolebindings/nova-nova-test",
                                "openstack/rolebindings/octavia-octavia-api",
                                "openstack/rolebindings/octavia-octavia-db-init",
                                "openstack/rolebindings/octavia-octavia-db-sync",
                                "openstack/rolebindings/octavia-octavia-health-manager",
                                "openstack/rolebindings/octavia-octavia-housekeeping",
                                "openstack/rolebindings/octavia-octavia-ks-endpoints",
                                "openstack/rolebindings/octavia-octavia-ks-service",
                                "openstack/rolebindings/octavia-octavia-ks-user",
                                "openstack/rolebindings/octavia-octavia-rabbit-init",
                                "openstack/rolebindings/octavia-octavia-worker",
                                "openstack/rolebindings/ovn-controller",
                                "openstack/rolebindings/ovn-northd",
                                "openstack/rolebindings/ovn-ovn-controller",
                                "openstack/rolebindings/ovn-ovn-northd",
                                "openstack/rolebindings/ovn-ovsdb",
                                "openstack/rolebindings/placement-placement-api",
                                "openstack/rolebindings/placement-placement-db-init",
                                "openstack/rolebindings/placement-placement-db-sync",
                                "openstack/rolebindings/placement-placement-ks-endpoints",
                                "openstack/rolebindings/placement-placement-ks-service",
                                "openstack/rolebindings/placement-placement-ks-user",
                                "openstack/rolebindings/pxc-operator",
                                "openstack/rolebindings/rabbitmq-barbican-server",
                                "openstack/rolebindings/rabbitmq-cinder-server",
                                "openstack/rolebindings/rabbitmq-cluster-operator",
                                "openstack/rolebindings/rabbitmq-glance-server",
                                "openstack/rolebindings/rabbitmq-heat-server",
                                "openstack/rolebindings/rabbitmq-keystone-server",
                                "openstack/rolebindings/rabbitmq-magnum-server",
                                "openstack/rolebindings/rabbitmq-manila-server",
                                "openstack/rolebindings/rabbitmq-messaging-topology-operator",
                                "openstack/rolebindings/rabbitmq-neutron-server",
                                "openstack/rolebindings/rabbitmq-nova-server",
                                "openstack/rolebindings/rabbitmq-octavia-server",
                                "openstack/rolebindings/rook-ceph-cluster-mgmt",
                                "openstack/rolebindings/rook-ceph-cmd-reporter",
                                "openstack/rolebindings/rook-ceph-mgr",
                                "openstack/rolebindings/rook-ceph-osd",
                                "openstack/rolebindings/rook-ceph-purge-osd",
                                "openstack/rolebindings/staffeln-conductor-leases",
                                "openstack/rolebindings/staffeln-staffeln-api",
                                "openstack/rolebindings/staffeln-staffeln-conductor",
                                "openstack/rolebindings/staffeln-staffeln-db-init",
                                "openstack/rolebindings/staffeln-staffeln-db-sync",
                                "openstack/rolebindings/tempest-tempest-ks-user",
                                "openstack/rolebindings/tempest-tempest-run-tests",
                                "openstack/roles/barbican-openstack-barbican-api",
                                "openstack/roles/barbican-openstack-barbican-db-init",
                                "openstack/roles/barbican-openstack-barbican-db-sync",
                                "openstack/roles/barbican-openstack-barbican-ks-endpoints",
                                "openstack/roles/barbican-openstack-barbican-ks-service",
                                "openstack/roles/barbican-openstack-barbican-ks-user",
                                "openstack/roles/barbican-openstack-barbican-rabbit-init",
                                "openstack/roles/cinder-backup-storage-init",
                                "openstack/roles/cinder-openstack-cinder-api",
                                "openstack/roles/cinder-openstack-cinder-backup",
                                "openstack/roles/cinder-openstack-cinder-bootstrap",
                                "openstack/roles/cinder-openstack-cinder-create-internal-tenant",
                                "openstack/roles/cinder-openstack-cinder-db-init",
                                "openstack/roles/cinder-openstack-cinder-db-sync",
                                "openstack/roles/cinder-openstack-cinder-ks-endpoints",
                                "openstack/roles/cinder-openstack-cinder-ks-service",
                                "openstack/roles/cinder-openstack-cinder-ks-user",
                                "openstack/roles/cinder-openstack-cinder-rabbit-init",
                                "openstack/roles/cinder-openstack-cinder-scheduler",
                                "openstack/roles/cinder-openstack-cinder-test",
                                "openstack/roles/cinder-openstack-cinder-volume",
                                "openstack/roles/cinder-openstack-cinder-volume-usage-audit",
                                "openstack/roles/cinder-storage-init",
                                "openstack/roles/glance-openstack-glance-api",
                                "openstack/roles/glance-openstack-glance-db-init",
                                "openstack/roles/glance-openstack-glance-db-sync",
                                "openstack/roles/glance-openstack-glance-ks-endpoints",
                                "openstack/roles/glance-openstack-glance-ks-service",
                                "openstack/roles/glance-openstack-glance-ks-user",
                                "openstack/roles/glance-openstack-glance-metadefs-load",
                                "openstack/roles/glance-openstack-glance-rabbit-init",
                                "openstack/roles/glance-openstack-glance-storage-init",
                                "openstack/roles/glance-openstack-glance-test",
                                "openstack/roles/glance-storage-init",
                                "openstack/roles/heat-openstack-heat-api",
                                "openstack/roles/heat-openstack-heat-bootstrap",
                                "openstack/roles/heat-openstack-heat-cfn",
                                "openstack/roles/heat-openstack-heat-db-init",
                                "openstack/roles/heat-openstack-heat-db-sync",
                                "openstack/roles/heat-openstack-heat-engine",
                                "openstack/roles/heat-openstack-heat-engine-cleaner",
                                "openstack/roles/heat-openstack-heat-ks-endpoints",
                                "openstack/roles/heat-openstack-heat-ks-service",
                                "openstack/roles/heat-openstack-heat-ks-user",
                                "openstack/roles/heat-openstack-heat-ks-user-domain",
                                "openstack/roles/heat-openstack-heat-purge-deleted",
                                "openstack/roles/heat-openstack-heat-rabbit-init",
                                "openstack/roles/heat-openstack-heat-test",
                                "openstack/roles/heat-openstack-heat-trusts",
                                "openstack/roles/horizon-openstack-horizon",
                                "openstack/roles/horizon-openstack-horizon-db-init",
                                "openstack/roles/horizon-openstack-horizon-db-sync",
                                "openstack/roles/horizon-openstack-horizon-test",
                                "openstack/roles/keepalived",
                                "openstack/roles/keystone-credential-rotate",
                                "openstack/roles/keystone-credential-setup",
                                "openstack/roles/keystone-fernet-rotate",
                                "openstack/roles/keystone-fernet-setup",
                                "openstack/roles/keystone-openstack-keystone-api",
                                "openstack/roles/keystone-openstack-keystone-bootstrap",
                                "openstack/roles/keystone-openstack-keystone-credential-rotate",
                                "openstack/roles/keystone-openstack-keystone-db-init",
                                "openstack/roles/keystone-openstack-keystone-db-sync",
                                "openstack/roles/keystone-openstack-keystone-domain-manage",
                                "openstack/roles/keystone-openstack-keystone-fernet-rotate",
                                "openstack/roles/keystone-openstack-keystone-test",
                                "openstack/roles/libvirt-cert-manager",
                                "openstack/roles/libvirt-openstack-libvirt",
                                "openstack/roles/magnum-openstack-magnum-api",
                                "openstack/roles/magnum-openstack-magnum-conductor",
                                "openstack/roles/magnum-openstack-magnum-db-init",
                                "openstack/roles/magnum-openstack-magnum-db-sync",
                                "openstack/roles/magnum-openstack-magnum-ks-endpoints",
                                "openstack/roles/magnum-openstack-magnum-ks-service",
                                "openstack/roles/magnum-openstack-magnum-ks-user",
                                "openstack/roles/magnum-openstack-magnum-ks-user-domain",
                                "openstack/roles/magnum-openstack-magnum-rabbit-init",
                                "openstack/roles/manila-openstack-manila-api",
                                "openstack/roles/manila-openstack-manila-data",
                                "openstack/roles/manila-openstack-manila-db-init",
                                "openstack/roles/manila-openstack-manila-db-sync",
                                "openstack/roles/manila-openstack-manila-ks-endpoints",
                                "openstack/roles/manila-openstack-manila-ks-service",
                                "openstack/roles/manila-openstack-manila-ks-user",
                                "openstack/roles/manila-openstack-manila-rabbit-init",
                                "openstack/roles/manila-openstack-manila-scheduler",
                                "openstack/roles/manila-openstack-manila-share",
                                "openstack/roles/neutron-openstack-neutron-db-init",
                                "openstack/roles/neutron-openstack-neutron-db-sync",
                                "openstack/roles/neutron-openstack-neutron-ks-endpoints",
                                "openstack/roles/neutron-openstack-neutron-ks-service",
                                "openstack/roles/neutron-openstack-neutron-ks-user",
                                "openstack/roles/neutron-openstack-neutron-ovn-metadata-agent",
                                "openstack/roles/neutron-openstack-neutron-rabbit-init",
                                "openstack/roles/neutron-openstack-neutron-server",
                                "openstack/roles/neutron-openstack-neutron-test",
                                "openstack/roles/nova-openstack-nova-api-metadata",
                                "openstack/roles/nova-openstack-nova-api-osapi",
                                "openstack/roles/nova-openstack-nova-bootstrap",
                                "openstack/roles/nova-openstack-nova-cell-setup",
                                "openstack/roles/nova-openstack-nova-cell-setup-cron",
                                "openstack/roles/nova-openstack-nova-compute",
                                "openstack/roles/nova-openstack-nova-conductor",
                                "openstack/roles/nova-openstack-nova-db-init",
                                "openstack/roles/nova-openstack-nova-db-sync",
                                "openstack/roles/nova-openstack-nova-ks-endpoints",
                                "openstack/roles/nova-openstack-nova-ks-service",
                                "openstack/roles/nova-openstack-nova-ks-user",
                                "openstack/roles/nova-openstack-nova-novncproxy",
                                "openstack/roles/nova-openstack-nova-rabbit-init",
                                "openstack/roles/nova-openstack-nova-scheduler",
                                "openstack/roles/nova-openstack-nova-service-cleaner",
                                "openstack/roles/nova-openstack-nova-test",
                                "openstack/roles/octavia-openstack-octavia-api",
                                "openstack/roles/octavia-openstack-octavia-db-init",
                                "openstack/roles/octavia-openstack-octavia-db-sync",
                                "openstack/roles/octavia-openstack-octavia-health-manager",
                                "openstack/roles/octavia-openstack-octavia-housekeeping",
                                "openstack/roles/octavia-openstack-octavia-ks-endpoints",
                                "openstack/roles/octavia-openstack-octavia-ks-service",
                                "openstack/roles/octavia-openstack-octavia-ks-user",
                                "openstack/roles/octavia-openstack-octavia-rabbit-init",
                                "openstack/roles/octavia-openstack-octavia-worker",
                                "openstack/roles/ovn-controller",
                                "openstack/roles/ovn-northd",
                                "openstack/roles/ovn-openstack-ovn-controller",
                                "openstack/roles/ovn-openstack-ovn-northd",
                                "openstack/roles/ovn-ovsdb",
                                "openstack/roles/placement-openstack-placement-api",
                                "openstack/roles/placement-openstack-placement-db-init",
                                "openstack/roles/placement-openstack-placement-db-sync",
                                "openstack/roles/placement-openstack-placement-ks-endpoints",
                                "openstack/roles/placement-openstack-placement-ks-service",
                                "openstack/roles/placement-openstack-placement-ks-user",
                                "openstack/roles/pxc-operator",
                                "openstack/roles/rabbitmq-barbican-peer-discovery",
                                "openstack/roles/rabbitmq-cinder-peer-discovery",
                                "openstack/roles/rabbitmq-cluster-operator",
                                "openstack/roles/rabbitmq-glance-peer-discovery",
                                "openstack/roles/rabbitmq-heat-peer-discovery",
                                "openstack/roles/rabbitmq-keystone-peer-discovery",
                                "openstack/roles/rabbitmq-magnum-peer-discovery",
                                "openstack/roles/rabbitmq-manila-peer-discovery",
                                "openstack/roles/rabbitmq-messaging-topology-operator",
                                "openstack/roles/rabbitmq-neutron-peer-discovery",
                                "openstack/roles/rabbitmq-nova-peer-discovery",
                                "openstack/roles/rabbitmq-octavia-peer-discovery",
                                "openstack/roles/rook-ceph-cmd-reporter",
                                "openstack/roles/rook-ceph-mgr",
                                "openstack/roles/rook-ceph-osd",
                                "openstack/roles/rook-ceph-purge-osd",
                                "openstack/roles/staffeln-conductor",
                                "openstack/roles/staffeln-openstack-staffeln-api",
                                "openstack/roles/staffeln-openstack-staffeln-conductor",
                                "openstack/roles/staffeln-openstack-staffeln-db-init",
                                "openstack/roles/staffeln-openstack-staffeln-db-sync",
                                "openstack/roles/tempest-openstack-tempest-ks-user",
                                "openstack/roles/tempest-openstack-tempest-run-tests",
                                "openstack/secrets/barbican-api-certs",
                                "openstack/secrets/barbican-db-admin",
                                "openstack/secrets/barbican-db-user",
                                "openstack/secrets/barbican-etc",
                                "openstack/secrets/barbican-keystone-admin",
                                "openstack/secrets/barbican-keystone-user",
                                "openstack/secrets/barbican-rabbitmq-admin",
                                "openstack/secrets/barbican-rabbitmq-user",
                                "openstack/secrets/cinder-api-certs",
                                "openstack/secrets/cinder-backup-rbd-keyring",
                                "openstack/secrets/cinder-db-admin",
                                "openstack/secrets/cinder-db-user",
                                "openstack/secrets/cinder-etc",
                                "openstack/secrets/cinder-keystone-admin",
                                "openstack/secrets/cinder-keystone-test",
                                "openstack/serviceaccounts/barbican-api",
                                "openstack/secrets/cinder-keystone-user",
                                "openstack/serviceaccounts/barbican-db-init",
                                "openstack/secrets/cinder-rabbitmq-admin",
                                "openstack/serviceaccounts/barbican-db-sync",
                                "openstack/secrets/cinder-rabbitmq-user",
                                "openstack/serviceaccounts/barbican-ks-endpoints",
                                "openstack/secrets/cinder-volume-rbd-keyring",
                                "openstack/secrets/glance-api-certs",
                                "openstack/serviceaccounts/barbican-ks-service",
                                "openstack/secrets/glance-db-admin",
                                "openstack/serviceaccounts/barbican-ks-user",
                                "openstack/secrets/glance-db-user",
                                "openstack/serviceaccounts/barbican-rabbit-init",
                                "openstack/secrets/glance-etc",
                                "openstack/serviceaccounts/barbican-test",
                                "openstack/secrets/glance-keystone-admin",
                                "openstack/serviceaccounts/cinder-api",
                                "openstack/secrets/glance-keystone-test",
                                "openstack/serviceaccounts/cinder-backup",
                                "openstack/secrets/glance-keystone-user",
                                "openstack/serviceaccounts/cinder-backup-storage-init",
                                "openstack/secrets/glance-rabbitmq-admin",
                                "openstack/serviceaccounts/cinder-bootstrap",
                                "openstack/secrets/glance-rabbitmq-user",
                                "openstack/serviceaccounts/cinder-create-internal-tenant",
                                "openstack/secrets/heat-api-certs",
                                "openstack/secrets/heat-cfn-certs",
                                "openstack/serviceaccounts/cinder-db-init",
                                "openstack/serviceaccounts/cinder-db-sync",
                                "openstack/serviceaccounts/cinder-ks-endpoints",
                                "openstack/serviceaccounts/cinder-ks-service",
                                "openstack/serviceaccounts/cinder-ks-user",
                                "openstack/serviceaccounts/cinder-rabbit-init",
                                "openstack/serviceaccounts/cinder-scheduler",
                                "openstack/serviceaccounts/cinder-storage-init",
                                "openstack/serviceaccounts/cinder-test",
                                "openstack/serviceaccounts/cinder-volume",
                                "openstack/serviceaccounts/cinder-volume-usage-audit",
                                "openstack/serviceaccounts/default",
                                "openstack/serviceaccounts/glance-api",
                                "openstack/serviceaccounts/glance-db-init",
                                "openstack/serviceaccounts/glance-db-sync",
                                "openstack/serviceaccounts/glance-ks-endpoints",
                                "openstack/serviceaccounts/glance-ks-service",
                                "openstack/serviceaccounts/glance-ks-user",
                                "openstack/serviceaccounts/glance-metadefs-load",
                                "openstack/serviceaccounts/glance-rabbit-init",
                                "openstack/serviceaccounts/glance-storage-init",
                                "openstack/serviceaccounts/glance-test",
                                "openstack/serviceaccounts/heat-api",
                                "openstack/serviceaccounts/heat-bootstrap",
                                "openstack/serviceaccounts/heat-cfn",
                                "openstack/serviceaccounts/heat-db-init",
                                "openstack/serviceaccounts/heat-db-sync",
                                "openstack/serviceaccounts/heat-engine",
                                "openstack/serviceaccounts/heat-engine-cleaner",
                                "openstack/serviceaccounts/heat-ks-endpoints",
                                "openstack/serviceaccounts/heat-ks-service",
                                "openstack/serviceaccounts/heat-ks-user",
                                "openstack/serviceaccounts/heat-ks-user-domain",
                                "openstack/serviceaccounts/heat-purge-deleted",
                                "openstack/serviceaccounts/heat-rabbit-init",
                                "openstack/serviceaccounts/heat-test",
                                "openstack/serviceaccounts/heat-trusts",
                                "openstack/serviceaccounts/horizon",
                                "openstack/serviceaccounts/horizon-db-init",
                                "openstack/serviceaccounts/horizon-db-sync",
                                "openstack/serviceaccounts/horizon-test",
                                "openstack/serviceaccounts/keepalived",
                                "openstack/serviceaccounts/keystone-api",
                                "openstack/serviceaccounts/keystone-bootstrap",
                                "openstack/serviceaccounts/keystone-credential-rotate",
                                "openstack/serviceaccounts/keystone-credential-setup",
                                "openstack/serviceaccounts/keystone-db-init",
                                "openstack/serviceaccounts/keystone-db-sync",
                                "openstack/serviceaccounts/keystone-domain-manage",
                                "openstack/serviceaccounts/keystone-fernet-rotate",
                                "openstack/serviceaccounts/keystone-fernet-setup",
                                "openstack/serviceaccounts/keystone-rabbit-init",
                                "openstack/serviceaccounts/keystone-test",
                                "openstack/serviceaccounts/libvirt",
                                "openstack/serviceaccounts/magnum-api",
                                "openstack/serviceaccounts/magnum-conductor",
                                "openstack/serviceaccounts/magnum-db-init",
                                "openstack/serviceaccounts/magnum-db-sync",
                                "openstack/serviceaccounts/magnum-ks-endpoints",
                                "openstack/serviceaccounts/magnum-ks-service",
                                "openstack/serviceaccounts/magnum-ks-user",
                                "openstack/serviceaccounts/magnum-ks-user-domain",
                                "openstack/serviceaccounts/magnum-rabbit-init",
                                "openstack/serviceaccounts/manila-api",
                                "openstack/serviceaccounts/manila-bootstrap",
                                "openstack/serviceaccounts/manila-data",
                                "openstack/serviceaccounts/manila-db-init",
                                "openstack/serviceaccounts/manila-db-sync",
                                "openstack/serviceaccounts/manila-ks-endpoints",
                                "openstack/serviceaccounts/manila-ks-service",
                                "openstack/serviceaccounts/manila-ks-user",
                                "openstack/secrets/heat-db-admin",
                                "openstack/secrets/heat-db-user",
                                "openstack/secrets/heat-etc",
                                "openstack/secrets/heat-keystone-admin",
                                "openstack/secrets/heat-keystone-stack-user",
                                "openstack/secrets/heat-keystone-test",
                                "openstack/secrets/heat-keystone-trustee",
                                "openstack/secrets/heat-keystone-user",
                                "openstack/secrets/heat-rabbitmq-admin",
                                "openstack/secrets/heat-rabbitmq-user",
                                "openstack/secrets/horizon-db-admin",
                                "openstack/secrets/horizon-db-user",
                                "openstack/secrets/horizon-etc",
                                "openstack/secrets/horizon-int-certs",
                                "openstack/secrets/horizon-keystone-admin",
                                "openstack/secrets/images-rbd-keyring",
                                "openstack/secrets/internal-percona-xtradb",
                                "openstack/secrets/keepalived-etc",
                                "openstack/secrets/keystone-api-certs",
                                "openstack/secrets/keystone-credential-keys",
                                "openstack/secrets/keystone-db-admin",
                                "openstack/secrets/keystone-db-user",
                                "openstack/secrets/keystone-etc",
                                "openstack/secrets/keystone-fernet-keys",
                                "openstack/secrets/keystone-keystone-admin",
                                "openstack/secrets/keystone-keystone-test",
                                "openstack/secrets/keystone-rabbitmq-admin",
                                "openstack/secrets/keystone-rabbitmq-user",
                                "openstack/secrets/libvirt-api-ca",
                                "openstack/secrets/libvirt-etc",
                                "openstack/secrets/libvirt-libvirt-default-6bgrg-api",
                                "openstack/secrets/libvirt-libvirt-default-6bgrg-vnc",
                                "openstack/secrets/libvirt-libvirt-default-etc",
                                "openstack/secrets/libvirt-vnc-ca",
                                "openstack/secrets/magnum-api-certs",
                                "openstack/secrets/magnum-db-admin",
                                "openstack/secrets/magnum-db-user",
                                "openstack/secrets/magnum-etc",
                                "openstack/secrets/magnum-keystone-admin",
                                "openstack/secrets/magnum-keystone-stack-user",
                                "openstack/secrets/magnum-keystone-user",
                                "openstack/secrets/magnum-rabbitmq-admin",
                                "openstack/secrets/magnum-rabbitmq-user",
                                "openstack/secrets/magnum-registry-certs",
                                "openstack/secrets/manila-api-certs",
                                "openstack/secrets/manila-db-admin",
                                "openstack/secrets/manila-db-user",
                                "openstack/secrets/manila-etc",
                                "openstack/secrets/manila-keystone-admin",
                                "openstack/secrets/manila-keystone-user",
                                "openstack/secrets/manila-rabbitmq-admin",
                                "openstack/secrets/manila-rabbitmq-user",
                                "openstack/secrets/manila-ssh-keys",
                                "openstack/secrets/neutron-db-admin",
                                "openstack/secrets/neutron-db-user",
                                "openstack/secrets/neutron-etc",
                                "openstack/secrets/neutron-keystone-admin",
                                "openstack/secrets/neutron-keystone-test",
                                "openstack/secrets/neutron-keystone-user",
                                "openstack/secrets/neutron-netns-cleanup-cron-default",
                                "openstack/secrets/neutron-ovn-metadata-agent-default",
                                "openstack/secrets/neutron-rabbitmq-admin",
                                "openstack/secrets/neutron-rabbitmq-user",
                                "openstack/secrets/neutron-server-certs",
                                "openstack/secrets/nova-api-certs",
                                "openstack/secrets/nova-compute-default",
                                "openstack/secrets/nova-db-admin",
                                "openstack/secrets/nova-db-api-admin",
                                "openstack/secrets/nova-db-api-user",
                                "openstack/secrets/nova-db-cell0-admin",
                                "openstack/secrets/nova-db-cell0-user",
                                "openstack/secrets/nova-db-user",
                                "openstack/secrets/nova-etc",
                                "openstack/secrets/nova-keystone-admin",
                                "openstack/secrets/nova-keystone-test",
                                "openstack/secrets/nova-keystone-user",
                                "openstack/secrets/nova-novncproxy-certs",
                                "openstack/secrets/nova-novncproxy-vencrypt",
                                "openstack/secrets/nova-rabbitmq-admin",
                                "openstack/secrets/nova-rabbitmq-user",
                                "openstack/secrets/nova-ssh",
                                "openstack/secrets/octavia-amphora-ssh-key",
                                "openstack/secrets/octavia-api-certs",
                                "openstack/secrets/octavia-client-ca",
                                "openstack/secrets/octavia-client-certs",
                                "openstack/secrets/octavia-db-admin",
                                "openstack/secrets/octavia-db-user",
                                "openstack/secrets/octavia-etc",
                                "openstack/secrets/octavia-health-manager-default",
                                "openstack/secrets/octavia-keystone-admin",
                                "openstack/secrets/octavia-keystone-test",
                                "openstack/secrets/octavia-keystone-user",
                                "openstack/serviceaccounts/manila-rabbit-init",
                                "openstack/serviceaccounts/manila-scheduler",
                                "openstack/serviceaccounts/manila-share",
                                "openstack/serviceaccounts/memcached-memcached",
                                "openstack/serviceaccounts/neutron-db-init",
                                "openstack/serviceaccounts/neutron-db-sync",
                                "openstack/serviceaccounts/neutron-ks-endpoints",
                                "openstack/serviceaccounts/neutron-ks-service",
                                "openstack/serviceaccounts/neutron-ks-user",
                                "openstack/serviceaccounts/neutron-netns-cleanup-cron",
                                "openstack/serviceaccounts/neutron-ovn-metadata-agent",
                                "openstack/serviceaccounts/neutron-rabbit-init",
                                "openstack/serviceaccounts/neutron-server",
                                "openstack/serviceaccounts/neutron-test",
                                "openstack/serviceaccounts/nova-api-metadata",
                                "openstack/serviceaccounts/nova-api-osapi",
                                "openstack/serviceaccounts/nova-bootstrap",
                                "openstack/serviceaccounts/nova-cell-setup",
                                "openstack/serviceaccounts/nova-cell-setup-cron",
                                "openstack/serviceaccounts/nova-compute",
                                "openstack/serviceaccounts/nova-conductor",
                                "openstack/serviceaccounts/nova-db-init",
                                "openstack/serviceaccounts/nova-db-sync",
                                "openstack/serviceaccounts/nova-ks-endpoints",
                                "openstack/serviceaccounts/nova-ks-service",
                                "openstack/serviceaccounts/nova-ks-user",
                                "openstack/serviceaccounts/nova-novncproxy",
                                "openstack/serviceaccounts/nova-rabbit-init",
                                "openstack/serviceaccounts/nova-scheduler",
                                "openstack/serviceaccounts/nova-service-cleaner",
                                "openstack/serviceaccounts/nova-test",
                                "openstack/serviceaccounts/octavia-api",
                                "openstack/serviceaccounts/octavia-bootstrap",
                                "openstack/serviceaccounts/octavia-db-init",
                                "openstack/serviceaccounts/octavia-db-sync",
                                "openstack/serviceaccounts/octavia-health-manager",
                                "openstack/serviceaccounts/octavia-housekeeping",
                                "openstack/serviceaccounts/octavia-ks-endpoints",
                                "openstack/serviceaccounts/octavia-ks-service",
                                "openstack/serviceaccounts/octavia-ks-user",
                                "openstack/serviceaccounts/octavia-rabbit-init",
                                "openstack/serviceaccounts/octavia-worker",
                                "openstack/serviceaccounts/openvswitch-server",
                                "openstack/serviceaccounts/ovn-controller",
                                "openstack/serviceaccounts/ovn-northd",
                                "openstack/serviceaccounts/ovn-ovsdb-nb",
                                "openstack/serviceaccounts/ovn-ovsdb-sb",
                                "openstack/serviceaccounts/placement-api",
                                "openstack/serviceaccounts/placement-db-init",
                                "openstack/serviceaccounts/placement-db-sync",
                                "openstack/serviceaccounts/placement-ks-endpoints",
                                "openstack/serviceaccounts/placement-ks-service",
                                "openstack/serviceaccounts/placement-ks-user",
                                "openstack/serviceaccounts/pxc-operator",
                                "openstack/serviceaccounts/rabbitmq-barbican-server",
                                "openstack/serviceaccounts/rabbitmq-cinder-server",
                                "openstack/serviceaccounts/rabbitmq-cluster-operator",
                                "openstack/serviceaccounts/rabbitmq-glance-server",
                                "openstack/serviceaccounts/rabbitmq-heat-server",
                                "openstack/serviceaccounts/rabbitmq-keystone-server",
                                "openstack/serviceaccounts/rabbitmq-magnum-server",
                                "openstack/serviceaccounts/rabbitmq-manila-server",
                                "openstack/serviceaccounts/rabbitmq-messaging-topology-operator",
                                "openstack/serviceaccounts/rabbitmq-neutron-server",
                                "openstack/serviceaccounts/rabbitmq-nova-server",
                                "openstack/serviceaccounts/rabbitmq-octavia-server",
                                "openstack/serviceaccounts/rook-ceph-cmd-reporter",
                                "openstack/serviceaccounts/rook-ceph-default",
                                "openstack/serviceaccounts/rook-ceph-mgr",
                                "openstack/secrets/octavia-persistence-db-admin",
                                "openstack/secrets/octavia-persistence-db-user",
                                "openstack/secrets/octavia-rabbitmq-admin",
                                "openstack/secrets/octavia-rabbitmq-user",
                                "openstack/secrets/octavia-server-ca",
                                "openstack/secrets/openstack-database-exporter-dsn",
                                "openstack/secrets/ovn-etc",
                                "openstack/secrets/ovn-vector-config",
                                "openstack/secrets/percona-xtradb",
                                "openstack/secrets/placement-api-certs",
                                "openstack/secrets/placement-db-admin",
                                "openstack/secrets/placement-db-user",
                                "openstack/secrets/placement-etc",
                                "openstack/secrets/placement-keystone-admin",
                                "openstack/secrets/placement-keystone-user",
                                "openstack/secrets/pvc-ceph-client-key",
                                "openstack/secrets/rabbitmq-barbican-default-user",
                                "openstack/secrets/rabbitmq-barbican-erlang-cookie",
                                "openstack/secrets/rabbitmq-cinder-default-user",
                                "openstack/secrets/rabbitmq-cinder-erlang-cookie",
                                "openstack/secrets/rabbitmq-glance-default-user",
                                "openstack/secrets/rabbitmq-glance-erlang-cookie",
                                "openstack/secrets/rabbitmq-heat-default-user",
                                "openstack/secrets/rabbitmq-heat-erlang-cookie",
                                "openstack/secrets/rabbitmq-keystone-default-user",
                                "openstack/secrets/rabbitmq-keystone-erlang-cookie",
                                "openstack/secrets/rabbitmq-magnum-default-user",
                                "openstack/secrets/rabbitmq-magnum-erlang-cookie",
                                "openstack/secrets/rabbitmq-manila-default-user",
                                "openstack/secrets/rabbitmq-manila-erlang-cookie",
                                "openstack/secrets/rabbitmq-messaging-topology-operator-webhook",
                                "openstack/secrets/rabbitmq-neutron-default-user",
                                "openstack/secrets/rabbitmq-neutron-erlang-cookie",
                                "openstack/secrets/rabbitmq-nova-default-user",
                                "openstack/secrets/rabbitmq-nova-erlang-cookie",
                                "openstack/secrets/rabbitmq-octavia-default-user",
                                "openstack/secrets/rabbitmq-octavia-erlang-cookie",
                                "openstack/secrets/rook-ceph-config",
                                "openstack/secrets/rook-ceph-crash-collector-keyring",
                                "openstack/secrets/rook-ceph-mon",
                                "openstack/secrets/rook-ceph-object-user-ceph-cosi",
                                "openstack/secrets/rook-ceph-rgw-ceph-a-keyring",
                                "openstack/secrets/rook-ceph-rgw-ceph-certs",
                                "openstack/secrets/rook-csi-cephfs-node",
                                "openstack/secrets/rook-csi-cephfs-provisioner",
                                "openstack/secrets/rook-csi-rbd-node",
                                "openstack/secrets/rook-csi-rbd-provisioner",
                                "openstack/secrets/sh.helm.release.v1.barbican.v1",
                                "openstack/secrets/sh.helm.release.v1.ceph-provisioners.v1",
                                "openstack/secrets/sh.helm.release.v1.ceph.v1",
                                "openstack/secrets/sh.helm.release.v1.cinder.v1",
                                "openstack/secrets/sh.helm.release.v1.glance.v1",
                                "openstack/secrets/sh.helm.release.v1.heat.v1",
                                "openstack/secrets/sh.helm.release.v1.horizon.v1",
                                "openstack/secrets/sh.helm.release.v1.keystone.v1",
                                "openstack/secrets/sh.helm.release.v1.libvirt.v1",
                                "openstack/secrets/sh.helm.release.v1.magnum.v1",
                                "openstack/secrets/sh.helm.release.v1.manila.v1",
                                "openstack/secrets/sh.helm.release.v1.memcached.v1",
                                "openstack/secrets/sh.helm.release.v1.neutron.v1",
                                "openstack/secrets/sh.helm.release.v1.nova.v1",
                                "openstack/secrets/sh.helm.release.v1.octavia.v1",
                                "openstack/secrets/sh.helm.release.v1.openvswitch.v1",
                                "openstack/secrets/sh.helm.release.v1.ovn.v1",
                                "openstack/secrets/sh.helm.release.v1.placement.v1",
                                "openstack/secrets/sh.helm.release.v1.pxc-operator.v1",
                                "openstack/secrets/sh.helm.release.v1.rabbitmq-cluster-operator.v1",
                                "openstack/secrets/sh.helm.release.v1.staffeln.v1",
                                "openstack/secrets/sh.helm.release.v1.tempest.v1",
                                "openstack/secrets/sh.helm.release.v1.valkey.v1",
                                "openstack/secrets/staffeln-db-admin",
                                "openstack/secrets/staffeln-db-user",
                                "openstack/secrets/staffeln-etc",
                                "openstack/secrets/tempest-etc",
                                "openstack/secrets/tempest-keystone-admin",
                                "openstack/secrets/tempest-keystone-user",
                                "openstack/secrets/valkey-ca",
                                "openstack/secrets/valkey-server-certs",
                                "openstack/serviceaccounts/rook-ceph-osd",
                                "openstack/serviceaccounts/rook-ceph-purge-osd",
                                "openstack/serviceaccounts/rook-ceph-rgw",
                                "openstack/serviceaccounts/staffeln-api",
                                "openstack/serviceaccounts/staffeln-conductor",
                                "openstack/serviceaccounts/staffeln-db-init",
                                "openstack/serviceaccounts/staffeln-db-sync",
                                "openstack/serviceaccounts/tempest-ks-user",
                                "openstack/serviceaccounts/tempest-run-tests",
                                "openstack/serviceaccounts/valkey",
                                "openstack/services/barbican-api",
                                "openstack/services/ceph-mon",
                                "openstack/services/cinder-api",
                                "openstack/services/glance-api",
                                "openstack/services/heat-api",
                                "openstack/services/heat-cfn",
                                "openstack/services/horizon",
                                "openstack/services/horizon-int",
                                "openstack/services/keystone-api",
                                "openstack/services/magnum-api",
                                "openstack/services/magnum-registry",
                                "openstack/services/manila-api",
                                "openstack/services/memcached",
                                "openstack/services/memcached-metrics",
                                "openstack/services/neutron-server",
                                "openstack/services/nova-api",
                                "openstack/services/nova-metadata",
                                "openstack/services/nova-novncproxy",
                                "openstack/services/octavia-api",
                                "openstack/services/openstack-exporter",
                                "openstack/services/ovn-ovsdb-nb",
                                "openstack/services/ovn-ovsdb-sb",
                                "openstack/services/percona-xtradb-cluster-operator",
                                "openstack/services/percona-xtradb-haproxy",
                                "openstack/services/percona-xtradb-haproxy-metrics",
                                "openstack/services/percona-xtradb-haproxy-replicas",
                                "openstack/services/percona-xtradb-pxc",
                                "openstack/services/percona-xtradb-pxc-unready",
                                "openstack/services/placement-api",
                                "openstack/services/rabbitmq-barbican",
                                "openstack/services/rabbitmq-barbican-nodes",
                                "openstack/services/rabbitmq-cinder",
                                "openstack/services/rabbitmq-cinder-nodes",
                                "openstack/services/rabbitmq-glance",
                                "openstack/services/rabbitmq-glance-nodes",
                                "openstack/services/rabbitmq-heat",
                                "openstack/services/rabbitmq-heat-nodes",
                                "openstack/services/rabbitmq-keystone",
                                "openstack/services/rabbitmq-keystone-nodes",
                                "openstack/services/rabbitmq-magnum",
                                "openstack/services/rabbitmq-magnum-nodes",
                                "openstack/services/rabbitmq-manila",
                                "openstack/services/rabbitmq-manila-nodes",
                                "openstack/services/rabbitmq-messaging-topology-operator-webhook",
                                "openstack/services/rabbitmq-neutron",
                                "openstack/services/rabbitmq-neutron-nodes",
                                "openstack/services/rabbitmq-nova",
                                "openstack/services/rabbitmq-nova-nodes",
                                "openstack/services/rabbitmq-octavia",
                                "openstack/services/rabbitmq-octavia-nodes",
                                "openstack/services/rook-ceph-rgw-ceph",
                                "openstack/services/staffeln-api",
                                "openstack/services/valkey",
                                "openstack/services/valkey-headless",
                                "openstack/services/valkey-metrics",
                                "openstack/statefulsets/magnum-conductor",
                                "openstack/statefulsets/ovn-ovsdb-nb",
                                "openstack/statefulsets/ovn-ovsdb-sb",
                                "openstack/statefulsets/percona-xtradb-haproxy",
                                "openstack/statefulsets/percona-xtradb-pxc",
                                "openstack/statefulsets/rabbitmq-barbican-server",
                                "openstack/statefulsets/rabbitmq-cinder-server",
                                "openstack/statefulsets/rabbitmq-glance-server",
                                "openstack/statefulsets/rabbitmq-heat-server",
                                "openstack/statefulsets/rabbitmq-keystone-server",
                                "openstack/statefulsets/rabbitmq-magnum-server",
                                "openstack/statefulsets/rabbitmq-manila-server",
                                "openstack/statefulsets/rabbitmq-neutron-server",
                                "openstack/statefulsets/rabbitmq-nova-server",
                                "openstack/statefulsets/rabbitmq-octavia-server",
                                "openstack/statefulsets/valkey-node",
                                "orc-system/roles/orc-leader-election-role",
                                "orc-system/serviceaccounts/default",
                                "orc-system/rolebindings/orc-leader-election-rolebinding",
                                "orc-system/services/orc-controller-manager-metrics-service",
                                "rook-ceph/configmaps/kube-root-ca.crt",
                                "rook-ceph/configmaps/rook-ceph-csi-config",
                                "rook-ceph/configmaps/rook-ceph-csi-mapping-config",
                                "rook-ceph/configmaps/rook-ceph-operator-config",
                                "secretgen-controller/configmaps/kube-root-ca.crt",
                                "rook-ceph/deployment/rook-ceph-operator",
                                "secretgen-controller/deployment/secretgen-controller",
                                "rook-ceph/pods/rook-ceph-operator-7b66cfb94c-tj94j",
                                "secretgen-controller/pods/secretgen-controller-5cf976ccc7-szs5h",
                                "rook-ceph/rolebindings/cephfs-csi-provisioner-role-cfg",
                                "rook-ceph/rolebindings/rbd-csi-provisioner-role-cfg",
                                "rook-ceph/rolebindings/rook-ceph-cluster-mgmt",
                                "rook-ceph/rolebindings/rook-ceph-cmd-reporter",
                                "rook-ceph/rolebindings/rook-ceph-mgr",
                                "rook-ceph/rolebindings/rook-ceph-mgr-system",
                                "rook-ceph/rolebindings/rook-ceph-mgr-system-openstack",
                                "rook-ceph/rolebindings/rook-ceph-osd",
                                "rook-ceph/rolebindings/rook-ceph-purge-osd",
                                "rook-ceph/rolebindings/rook-ceph-system",
                                "rook-ceph/roles/cephfs-external-provisioner-cfg",
                                "rook-ceph/roles/rbd-external-provisioner-cfg",
                                "rook-ceph/roles/rook-ceph-cmd-reporter",
                                "rook-ceph/roles/rook-ceph-mgr",
                                "rook-ceph/roles/rook-ceph-osd",
                                "rook-ceph/roles/rook-ceph-purge-osd",
                                "rook-ceph/roles/rook-ceph-system",
                                "rook-ceph/secrets/sh.helm.release.v1.rook-ceph.v1",
                                "secretgen-controller/serviceaccounts/default",
                                "secretgen-controller/serviceaccounts/secretgen-controller-sa",
                                "rook-ceph/serviceaccounts/default",
                                "rook-ceph/serviceaccounts/objectstorage-provisioner",
                                "rook-ceph/serviceaccounts/rook-ceph-cmd-reporter",
                                "rook-ceph/serviceaccounts/rook-ceph-default",
                                "rook-ceph/serviceaccounts/rook-ceph-mgr",
                                "rook-ceph/serviceaccounts/rook-ceph-osd",
                                "rook-ceph/serviceaccounts/rook-ceph-purge-osd",
                                "rook-ceph/serviceaccounts/rook-ceph-rgw",
                                "rook-ceph/serviceaccounts/rook-ceph-system",
                                "rook-ceph/serviceaccounts/rook-csi-cephfs-plugin-sa",
                                "rook-ceph/serviceaccounts/rook-csi-cephfs-provisioner-sa",
                                "rook-ceph/serviceaccounts/rook-csi-rbd-plugin-sa",
                                "rook-ceph/serviceaccounts/rook-csi-rbd-provisioner-sa",
                                "orc-system/serviceaccounts/orc-controller-manager"
                            ],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000001c-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:28:56.787692Z",
                            "start": "2026-03-01T22:25:39.441714Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000001c",
                        "name": "Gathering descriptions for namespace scoped objects"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/tmp/logs/objects /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.156:/tmp/logs/objects",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ objects/\ncd+++++++++ objects/cluster/\ncd+++++++++ objects/cluster/clusterrole/\n>f+++++++++ objects/cluster/clusterrole/admin.txt\n>f+++++++++ objects/cluster/clusterrole/admin.yaml\n>f+++++++++ objects/cluster/clusterrole/capi-aggregated-manager-role.txt\n>f+++++++++ objects/cluster/clusterrole/capi-aggregated-manager-role.yaml\n>f+++++++++ objects/cluster/clusterrole/capi-kubeadm-bootstrap-manager-role.txt\n>f+++++++++ objects/cluster/clusterrole/capi-kubeadm-bootstrap-manager-role.yaml\n>f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-aggregated-manager-role.txt\n>f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-aggregated-manager-role.yaml\n>f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-manager-role.txt\n>f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-manager-role.yaml\n>f+++++++++ objects/cluster/clusterrole/capi-manager-role.txt\n>f+++++++++ objects/cluster/clusterrole/capi-manager-role.yaml\n>f+++++++++ objects/cluster/clusterrole/capo-manager-role.txt\n>f+++++++++ objects/cluster/clusterrole/capo-manager-role.yaml\n>f+++++++++ objects/cluster/clusterrole/cephfs-csi-nodeplugin.txt\n>f+++++++++ objects/cluster/clusterrole/cephfs-csi-nodeplugin.yaml\n>f+++++++++ objects/cluster/clusterrole/cephfs-external-provisioner-runner.txt\n>f+++++++++ objects/cluster/clusterrole/cephfs-external-provisioner-runner.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-cainjector.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-cainjector.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-approve:cert-manager-io.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-approve:cert-manager-io.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-certificates.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-certificates.yaml\n>f+++++++++ 
objects/cluster/clusterrole/cert-manager-controller-certificatesigningrequests.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-certificatesigningrequests.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-challenges.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-challenges.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-clusterissuers.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-clusterissuers.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-ingress-shim.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-ingress-shim.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-issuers.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-issuers.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-orders.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-controller-orders.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-edit.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-edit.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-view.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-view.yaml\n>f+++++++++ objects/cluster/clusterrole/cert-manager-webhook:subjectaccessreviews.txt\n>f+++++++++ objects/cluster/clusterrole/cert-manager-webhook:subjectaccessreviews.yaml\n>f+++++++++ objects/cluster/clusterrole/cilium-operator.txt\n>f+++++++++ objects/cluster/clusterrole/cilium-operator.yaml\n>f+++++++++ objects/cluster/clusterrole/cilium.txt\n>f+++++++++ objects/cluster/clusterrole/cilium.yaml\n>f+++++++++ objects/cluster/clusterrole/cluster-admin.txt\n>f+++++++++ objects/cluster/clusterrole/cluster-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/edit.txt\n>f+++++++++ objects/cluster/clusterrole/edit.yaml\n>f+++++++++ objects/cluster/clusterrole/goldpinger-clusterrole.txt\n>f+++++++++ 
objects/cluster/clusterrole/goldpinger-clusterrole.yaml\n>f+++++++++ objects/cluster/clusterrole/ingress-nginx.txt\n>f+++++++++ objects/cluster/clusterrole/ingress-nginx.yaml\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-grafana-clusterrole.txt\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-grafana-clusterrole.yaml\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-kube-state-metrics.txt\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-kube-state-metrics.yaml\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-operator.txt\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-operator.yaml\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-prometheus.txt\n>f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-prometheus.yaml\n>f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.txt\n>f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.yaml\n>f+++++++++ objects/cluster/clusterrole/local-path-provisioner.txt\n>f+++++++++ objects/cluster/clusterrole/local-path-provisioner.yaml\n>f+++++++++ objects/cluster/clusterrole/node-feature-discovery-gc.txt\n>f+++++++++ objects/cluster/clusterrole/node-feature-discovery-gc.yaml\n>f+++++++++ objects/cluster/clusterrole/node-feature-discovery.txt\n>f+++++++++ objects/cluster/clusterrole/node-feature-discovery.yaml\n>f+++++++++ objects/cluster/clusterrole/nova-bootstrap.txt\n>f+++++++++ objects/cluster/clusterrole/nova-bootstrap.yaml\n>f+++++++++ objects/cluster/clusterrole/nova-cell-setup.txt\n>f+++++++++ objects/cluster/clusterrole/nova-cell-setup.yaml\n>f+++++++++ objects/cluster/clusterrole/objectstorage-provisioner-role.txt\n>f+++++++++ objects/cluster/clusterrole/objectstorage-provisioner-role.yaml\n>f+++++++++ objects/cluster/clusterrole/orc-image-editor-role.txt\n>f+++++++++ objects/cluster/clusterrole/orc-image-editor-role.yaml\n>f+++++++++ objects/cluster/clusterrole/orc-image-viewer-role.txt\n>f+++++++++ 
objects/cluster/clusterrole/orc-image-viewer-role.yaml\n>f+++++++++ objects/cluster/clusterrole/orc-manager-role.txt\n>f+++++++++ objects/cluster/clusterrole/orc-manager-role.yaml\n>f+++++++++ objects/cluster/clusterrole/orc-metrics-auth-role.txt\n>f+++++++++ objects/cluster/clusterrole/orc-metrics-auth-role.yaml\n>f+++++++++ objects/cluster/clusterrole/orc-metrics-reader.txt\n>f+++++++++ objects/cluster/clusterrole/orc-metrics-reader.yaml\n>f+++++++++ objects/cluster/clusterrole/ovn-controller.txt\n>f+++++++++ objects/cluster/clusterrole/ovn-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-admin.txt\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-edit.txt\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-edit.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-view.txt\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-view.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack.txt\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-admin.txt\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-edit.txt\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-edit.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-view.txt\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-view.yaml\n>f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack.txt\n>f+++++++++ 
objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack.yaml\n>f+++++++++ objects/cluster/clusterrole/rbd-csi-nodeplugin.txt\n>f+++++++++ objects/cluster/clusterrole/rbd-csi-nodeplugin.yaml\n>f+++++++++ objects/cluster/clusterrole/rbd-external-provisioner-runner.txt\n>f+++++++++ objects/cluster/clusterrole/rbd-external-provisioner-runner.yaml\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-cluster-mgmt.txt\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-cluster-mgmt.yaml\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-global.txt\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-global.yaml\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-cluster.txt\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-cluster.yaml\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-system.txt\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-system.yaml\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-object-bucket.txt\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-object-bucket.yaml\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-osd.txt\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-osd.yaml\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-system.txt\n>f+++++++++ objects/cluster/clusterrole/rook-ceph-system.yaml\n>f+++++++++ objects/cluster/clusterrole/secretgen-controller-cluster-role.txt\n>f+++++++++ objects/cluster/clusterrole/secretgen-controller-cluster-role.yaml\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.txt\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.txt\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.yaml\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.txt\n>f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.yaml\n>f+++++++++ objects/cluster/clusterrole/system:auth-delegator.txt\n>f+++++++++ 
objects/cluster/clusterrole/system:auth-delegator.yaml\n>f+++++++++ objects/cluster/clusterrole/system:basic-user.txt\n>f+++++++++ objects/cluster/clusterrole/system:basic-user.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.txt\n>f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.txt\n>f+++++++++ 
objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:daemon-set-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:daemon-set-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.txt\n>f+++++++++ 
objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:node-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:node-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.yaml\n>f+++++++++ 
objects/cluster/clusterrole/system:controller:route-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:route-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:service-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:service-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.txt\n>f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.yaml\n>f+++++++++ objects/cluster/clusterrole/system:coredns.txt\n>f+++++++++ objects/cluster/clusterrole/system:coredns.yaml\n>f+++++++++ objects/cluster/clusterrole/system:discovery.txt\n>f+++++++++ objects/cluster/clusterrole/system:discovery.yaml\n>f+++++++++ objects/cluster/clusterrole/system:heapster.txt\n>f+++++++++ objects/cluster/clusterrole/system:heapster.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-dns.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-dns.yaml\n>f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.txt\n>f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.yaml\n>f+++++++++ 
objects/cluster/clusterrole/system:kubelet-api-admin.txt\n>f+++++++++ objects/cluster/clusterrole/system:kubelet-api-admin.yaml\n>f+++++++++ objects/cluster/clusterrole/system:monitoring.txt\n>f+++++++++ objects/cluster/clusterrole/system:monitoring.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.txt\n>f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node-problem-detector.txt\n>f+++++++++ objects/cluster/clusterrole/system:node-problem-detector.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node-proxier.txt\n>f+++++++++ objects/cluster/clusterrole/system:node-proxier.yaml\n>f+++++++++ objects/cluster/clusterrole/system:node.txt\n>f+++++++++ objects/cluster/clusterrole/system:node.yaml\n>f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.txt\n>f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.yaml\n>f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.txt\n>f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.yaml\n>f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.txt\n>f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.yaml\n>f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.txt\n>f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.yaml\n>f+++++++++ objects/cluster/clusterrole/vector.txt\n>f+++++++++ objects/cluster/clusterrole/vector.yaml\n>f+++++++++ objects/cluster/clusterrole/view.txt\n>f+++++++++ objects/cluster/clusterrole/view.yaml\ncd+++++++++ objects/cluster/clusterrolebinding/\n>f+++++++++ objects/cluster/clusterrolebinding/capi-kubeadm-bootstrap-manager-rolebinding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/capi-kubeadm-bootstrap-manager-rolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/capi-kubeadm-control-plane-manager-rolebinding.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/capi-kubeadm-control-plane-manager-rolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/capi-manager-rolebinding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/capi-manager-rolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/capo-manager-rolebinding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/capo-manager-rolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-nodeplugin-role.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-nodeplugin-role.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-provisioner-role.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-provisioner-role.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-cainjector.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-cainjector.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-approve:cert-manager-io.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-approve:cert-manager-io.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificates.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificates.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificatesigningrequests.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificatesigningrequests.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-challenges.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-challenges.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-clusterissuers.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-clusterissuers.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-ingress-shim.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/cert-manager-controller-ingress-shim.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-issuers.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-issuers.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-orders.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-orders.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-webhook:subjectaccessreviews.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cert-manager-webhook:subjectaccessreviews.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cilium.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cilium.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.txt\n>f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/goldpinger-clusterrolebinding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/goldpinger-clusterrolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/ingress-nginx.txt\n>f+++++++++ objects/cluster/clusterrolebinding/ingress-nginx.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-grafana-clusterrolebinding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-grafana-clusterrolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-kube-state-metrics.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-kube-state-metrics.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-operator.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-operator.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-prometheus.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/kube-prometheus-stack-prometheus.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.txt\n>f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/local-path-provisioner.txt\n>f+++++++++ objects/cluster/clusterrolebinding/local-path-provisioner.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/magnum-cluster-api.txt\n>f+++++++++ objects/cluster/clusterrolebinding/magnum-cluster-api.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery-gc.txt\n>f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery-gc.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery.txt\n>f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/nova-bootstrap.txt\n>f+++++++++ objects/cluster/clusterrolebinding/nova-bootstrap.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/nova-cell-setup.txt\n>f+++++++++ objects/cluster/clusterrolebinding/nova-cell-setup.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/objectstorage-provisioner-role-binding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/objectstorage-provisioner-role-binding.yaml\n>f+++++++++ 
objects/cluster/clusterrolebinding/orc-manager-rolebinding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/orc-manager-rolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/orc-metrics-auth-rolebinding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/orc-metrics-auth-rolebinding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/ovn-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/ovn-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-cluster-operator-openstack.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-cluster-operator-openstack.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-messaging-topology-operator-openstack.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-messaging-topology-operator-openstack.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-nodeplugin.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-nodeplugin.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-provisioner-role.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-provisioner-role.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-global.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-global.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster-openstack.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster-openstack.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-object-bucket.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-object-bucket.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-osd-openstack.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-osd-openstack.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-osd.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/rook-ceph-osd.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-system.txt\n>f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-system.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/secretgen-controller-cluster-role-binding.txt\n>f+++++++++ objects/cluster/clusterrolebinding/secretgen-controller-cluster-role-binding.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:disruption-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:disruption-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpoint-controller.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:controller:endpoint-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:node-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:node-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.yaml\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:statefulset-controller.txt\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:controller:statefulset-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:coredns.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:coredns.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:discovery.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:discovery.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:node.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:node.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:public-info-viewer.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:public-info-viewer.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.yaml\n>f+++++++++ 
objects/cluster/clusterrolebinding/system:volume-scheduler.txt\n>f+++++++++ objects/cluster/clusterrolebinding/system:volume-scheduler.yaml\n>f+++++++++ objects/cluster/clusterrolebinding/vector.txt\n>f+++++++++ objects/cluster/clusterrolebinding/vector.yaml\ncd+++++++++ objects/cluster/namespace/\n>f+++++++++ objects/cluster/namespace/auth-system.txt\n>f+++++++++ objects/cluster/namespace/auth-system.yaml\n>f+++++++++ objects/cluster/namespace/capi-kubeadm-bootstrap-system.txt\n>f+++++++++ objects/cluster/namespace/capi-kubeadm-bootstrap-system.yaml\n>f+++++++++ objects/cluster/namespace/capi-kubeadm-control-plane-system.txt\n>f+++++++++ objects/cluster/namespace/capi-kubeadm-control-plane-system.yaml\n>f+++++++++ objects/cluster/namespace/capi-system.txt\n>f+++++++++ objects/cluster/namespace/capi-system.yaml\n>f+++++++++ objects/cluster/namespace/capo-system.txt\n>f+++++++++ objects/cluster/namespace/capo-system.yaml\n>f+++++++++ objects/cluster/namespace/cert-manager.txt\n>f+++++++++ objects/cluster/namespace/cert-manager.yaml\n>f+++++++++ objects/cluster/namespace/default.txt\n>f+++++++++ objects/cluster/namespace/default.yaml\n>f+++++++++ objects/cluster/namespace/ingress-nginx.txt\n>f+++++++++ objects/cluster/namespace/ingress-nginx.yaml\n>f+++++++++ objects/cluster/namespace/kube-node-lease.txt\n>f+++++++++ objects/cluster/namespace/kube-node-lease.yaml\n>f+++++++++ objects/cluster/namespace/kube-public.txt\n>f+++++++++ objects/cluster/namespace/kube-public.yaml\n>f+++++++++ objects/cluster/namespace/kube-system.txt\n>f+++++++++ objects/cluster/namespace/kube-system.yaml\n>f+++++++++ objects/cluster/namespace/local-path-storage.txt\n>f+++++++++ objects/cluster/namespace/local-path-storage.yaml\n>f+++++++++ objects/cluster/namespace/magnum-system.txt\n>f+++++++++ objects/cluster/namespace/magnum-system.yaml\n>f+++++++++ objects/cluster/namespace/monitoring.txt\n>f+++++++++ objects/cluster/namespace/monitoring.yaml\n>f+++++++++ 
objects/cluster/namespace/openstack.txt\n>f+++++++++ objects/cluster/namespace/openstack.yaml\n>f+++++++++ objects/cluster/namespace/orc-system.txt\n>f+++++++++ objects/cluster/namespace/orc-system.yaml\n>f+++++++++ objects/cluster/namespace/rook-ceph.txt\n>f+++++++++ objects/cluster/namespace/rook-ceph.yaml\n>f+++++++++ objects/cluster/namespace/secretgen-controller.txt\n>f+++++++++ objects/cluster/namespace/secretgen-controller.yaml\ncd+++++++++ objects/cluster/node/\n>f+++++++++ objects/cluster/node/instance.txt\n>f+++++++++ objects/cluster/node/instance.yaml\ncd+++++++++ objects/cluster/storageclass/\n>f+++++++++ objects/cluster/storageclass/general.txt\n>f+++++++++ objects/cluster/storageclass/general.yaml\ncd+++++++++ objects/namespaced/\ncd+++++++++ objects/namespaced/auth-system/\ncd+++++++++ objects/namespaced/auth-system/configmaps/\n>f+++++++++ objects/namespaced/auth-system/configmaps/keycloak-env-vars.txt\n>f+++++++++ objects/namespaced/auth-system/configmaps/keycloak-env-vars.yaml\n>f+++++++++ objects/namespaced/auth-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/auth-system/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/auth-system/endpoints/\n>f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-headless.txt\n>f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-headless.yaml\n>f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-metrics.txt\n>f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-metrics.yaml\n>f+++++++++ objects/namespaced/auth-system/endpoints/keycloak.txt\n>f+++++++++ objects/namespaced/auth-system/endpoints/keycloak.yaml\ncd+++++++++ objects/namespaced/auth-system/ingresses/\n>f+++++++++ objects/namespaced/auth-system/ingresses/keycloak.txt\n>f+++++++++ objects/namespaced/auth-system/ingresses/keycloak.yaml\ncd+++++++++ objects/namespaced/auth-system/networkpolicies/\n>f+++++++++ objects/namespaced/auth-system/networkpolicies/keycloak.txt\n>f+++++++++ 
objects/namespaced/auth-system/networkpolicies/keycloak.yaml\ncd+++++++++ objects/namespaced/auth-system/pods/\n>f+++++++++ objects/namespaced/auth-system/pods/keycloak-0.txt\n>f+++++++++ objects/namespaced/auth-system/pods/keycloak-0.yaml\ncd+++++++++ objects/namespaced/auth-system/secrets/\n>f+++++++++ objects/namespaced/auth-system/secrets/keycloak-externaldb.txt\n>f+++++++++ objects/namespaced/auth-system/secrets/keycloak-externaldb.yaml\n>f+++++++++ objects/namespaced/auth-system/secrets/keycloak.199-204-45-156.nip.io-tls.txt\n>f+++++++++ objects/namespaced/auth-system/secrets/keycloak.199-204-45-156.nip.io-tls.yaml\n>f+++++++++ objects/namespaced/auth-system/secrets/keycloak.txt\n>f+++++++++ objects/namespaced/auth-system/secrets/keycloak.yaml\n>f+++++++++ objects/namespaced/auth-system/secrets/sh.helm.release.v1.keycloak.v1.txt\n>f+++++++++ objects/namespaced/auth-system/secrets/sh.helm.release.v1.keycloak.v1.yaml\ncd+++++++++ objects/namespaced/auth-system/serviceaccounts/\n>f+++++++++ objects/namespaced/auth-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/auth-system/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/auth-system/serviceaccounts/keycloak.txt\n>f+++++++++ objects/namespaced/auth-system/serviceaccounts/keycloak.yaml\ncd+++++++++ objects/namespaced/auth-system/services/\n>f+++++++++ objects/namespaced/auth-system/services/keycloak-headless.txt\n>f+++++++++ objects/namespaced/auth-system/services/keycloak-headless.yaml\n>f+++++++++ objects/namespaced/auth-system/services/keycloak-metrics.txt\n>f+++++++++ objects/namespaced/auth-system/services/keycloak-metrics.yaml\n>f+++++++++ objects/namespaced/auth-system/services/keycloak.txt\n>f+++++++++ objects/namespaced/auth-system/services/keycloak.yaml\ncd+++++++++ objects/namespaced/auth-system/statefulsets/\n>f+++++++++ objects/namespaced/auth-system/statefulsets/keycloak.txt\n>f+++++++++ objects/namespaced/auth-system/statefulsets/keycloak.yaml\ncd+++++++++ 
objects/namespaced/capi-kubeadm-bootstrap-system/\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/configmaps/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/deployment/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/deployment/capi-kubeadm-bootstrap-controller-manager.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/deployment/capi-kubeadm-bootstrap-controller-manager.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/endpoints/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/endpoints/capi-kubeadm-bootstrap-webhook-service.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/endpoints/capi-kubeadm-bootstrap-webhook-service.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/pods/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/pods/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/pods/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/rolebindings/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/rolebindings/capi-kubeadm-bootstrap-leader-election-rolebinding.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/rolebindings/capi-kubeadm-bootstrap-leader-election-rolebinding.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/roles/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/roles/capi-kubeadm-bootstrap-leader-election-role.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/roles/capi-kubeadm-bootstrap-leader-election-role.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/secrets/\n>f+++++++++ 
objects/namespaced/capi-kubeadm-bootstrap-system/secrets/capi-kubeadm-bootstrap-webhook-service-cert.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/secrets/capi-kubeadm-bootstrap-webhook-service-cert.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/capi-kubeadm-bootstrap-manager.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/capi-kubeadm-bootstrap-manager.yaml\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/services/\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/services/capi-kubeadm-bootstrap-webhook-service.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/services/capi-kubeadm-bootstrap-webhook-service.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/configmaps/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/deployment/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/deployment/capi-kubeadm-control-plane-controller-manager.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/deployment/capi-kubeadm-control-plane-controller-manager.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/endpoints/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/endpoints/capi-kubeadm-control-plane-webhook-service.txt\n>f+++++++++ 
objects/namespaced/capi-kubeadm-control-plane-system/endpoints/capi-kubeadm-control-plane-webhook-service.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/pods/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/pods/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/pods/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/rolebindings/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/rolebindings/capi-kubeadm-control-plane-leader-election-rolebinding.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/rolebindings/capi-kubeadm-control-plane-leader-election-rolebinding.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/roles/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/roles/capi-kubeadm-control-plane-leader-election-role.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/roles/capi-kubeadm-control-plane-leader-election-role.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/secrets/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/secrets/capi-kubeadm-control-plane-webhook-service-cert.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/secrets/capi-kubeadm-control-plane-webhook-service-cert.yaml\ncd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/capi-kubeadm-control-plane-manager.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/capi-kubeadm-control-plane-manager.yaml\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/default.yaml\ncd+++++++++ 
objects/namespaced/capi-kubeadm-control-plane-system/services/\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/services/capi-kubeadm-control-plane-webhook-service.txt\n>f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/services/capi-kubeadm-control-plane-webhook-service.yaml\ncd+++++++++ objects/namespaced/capi-system/\ncd+++++++++ objects/namespaced/capi-system/configmaps/\n>f+++++++++ objects/namespaced/capi-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/capi-system/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/capi-system/deployment/\n>f+++++++++ objects/namespaced/capi-system/deployment/capi-controller-manager.txt\n>f+++++++++ objects/namespaced/capi-system/deployment/capi-controller-manager.yaml\ncd+++++++++ objects/namespaced/capi-system/endpoints/\n>f+++++++++ objects/namespaced/capi-system/endpoints/capi-webhook-service.txt\n>f+++++++++ objects/namespaced/capi-system/endpoints/capi-webhook-service.yaml\ncd+++++++++ objects/namespaced/capi-system/pods/\n>f+++++++++ objects/namespaced/capi-system/pods/capi-controller-manager-bc4cf8c95-w8p6b.txt\n>f+++++++++ objects/namespaced/capi-system/pods/capi-controller-manager-bc4cf8c95-w8p6b.yaml\ncd+++++++++ objects/namespaced/capi-system/rolebindings/\n>f+++++++++ objects/namespaced/capi-system/rolebindings/capi-leader-election-rolebinding.txt\n>f+++++++++ objects/namespaced/capi-system/rolebindings/capi-leader-election-rolebinding.yaml\ncd+++++++++ objects/namespaced/capi-system/roles/\n>f+++++++++ objects/namespaced/capi-system/roles/capi-leader-election-role.txt\n>f+++++++++ objects/namespaced/capi-system/roles/capi-leader-election-role.yaml\ncd+++++++++ objects/namespaced/capi-system/secrets/\n>f+++++++++ objects/namespaced/capi-system/secrets/capi-webhook-service-cert.txt\n>f+++++++++ objects/namespaced/capi-system/secrets/capi-webhook-service-cert.yaml\ncd+++++++++ objects/namespaced/capi-system/serviceaccounts/\n>f+++++++++ 
objects/namespaced/capi-system/serviceaccounts/capi-manager.txt\n>f+++++++++ objects/namespaced/capi-system/serviceaccounts/capi-manager.yaml\n>f+++++++++ objects/namespaced/capi-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/capi-system/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/capi-system/services/\n>f+++++++++ objects/namespaced/capi-system/services/capi-webhook-service.txt\n>f+++++++++ objects/namespaced/capi-system/services/capi-webhook-service.yaml\ncd+++++++++ objects/namespaced/capo-system/\ncd+++++++++ objects/namespaced/capo-system/configmaps/\n>f+++++++++ objects/namespaced/capo-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/capo-system/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/capo-system/deployment/\n>f+++++++++ objects/namespaced/capo-system/deployment/capo-controller-manager.txt\n>f+++++++++ objects/namespaced/capo-system/deployment/capo-controller-manager.yaml\ncd+++++++++ objects/namespaced/capo-system/endpoints/\n>f+++++++++ objects/namespaced/capo-system/endpoints/capo-webhook-service.txt\n>f+++++++++ objects/namespaced/capo-system/endpoints/capo-webhook-service.yaml\ncd+++++++++ objects/namespaced/capo-system/pods/\n>f+++++++++ objects/namespaced/capo-system/pods/capo-controller-manager-6975759b4b-tkxrs.txt\n>f+++++++++ objects/namespaced/capo-system/pods/capo-controller-manager-6975759b4b-tkxrs.yaml\ncd+++++++++ objects/namespaced/capo-system/rolebindings/\n>f+++++++++ objects/namespaced/capo-system/rolebindings/capo-leader-election-rolebinding.txt\n>f+++++++++ objects/namespaced/capo-system/rolebindings/capo-leader-election-rolebinding.yaml\ncd+++++++++ objects/namespaced/capo-system/roles/\n>f+++++++++ objects/namespaced/capo-system/roles/capo-leader-election-role.txt\n>f+++++++++ objects/namespaced/capo-system/roles/capo-leader-election-role.yaml\ncd+++++++++ objects/namespaced/capo-system/secrets/\n>f+++++++++ 
objects/namespaced/capo-system/secrets/capo-webhook-service-cert.txt\n>f+++++++++ objects/namespaced/capo-system/secrets/capo-webhook-service-cert.yaml\ncd+++++++++ objects/namespaced/capo-system/serviceaccounts/\n>f+++++++++ objects/namespaced/capo-system/serviceaccounts/capo-manager.txt\n>f+++++++++ objects/namespaced/capo-system/serviceaccounts/capo-manager.yaml\n>f+++++++++ objects/namespaced/capo-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/capo-system/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/capo-system/services/\n>f+++++++++ objects/namespaced/capo-system/services/capo-webhook-service.txt\n>f+++++++++ objects/namespaced/capo-system/services/capo-webhook-service.yaml\ncd+++++++++ objects/namespaced/cert-manager/\ncd+++++++++ objects/namespaced/cert-manager/configmaps/\n>f+++++++++ objects/namespaced/cert-manager/configmaps/cert-manager-webhook.txt\n>f+++++++++ objects/namespaced/cert-manager/configmaps/cert-manager-webhook.yaml\n>f+++++++++ objects/namespaced/cert-manager/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/cert-manager/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/cert-manager/deployment/\n>f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-cainjector.txt\n>f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-cainjector.yaml\n>f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-webhook.txt\n>f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-webhook.yaml\n>f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager.txt\n>f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager.yaml\ncd+++++++++ objects/namespaced/cert-manager/endpoints/\n>f+++++++++ objects/namespaced/cert-manager/endpoints/cert-manager-webhook.txt\n>f+++++++++ objects/namespaced/cert-manager/endpoints/cert-manager-webhook.yaml\n>f+++++++++ objects/namespaced/cert-manager/endpoints/cert-manager.txt\n>f+++++++++ 
objects/namespaced/cert-manager/endpoints/cert-manager.yaml\ncd+++++++++ objects/namespaced/cert-manager/pods/\n>f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-75c4c745bc-45s4r.txt\n>f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-75c4c745bc-45s4r.yaml\n>f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-cainjector-64b59ddb75-tl5x7.txt\n>f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-cainjector-64b59ddb75-tl5x7.yaml\n>f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-webhook-548949fc64-vkrlt.txt\n>f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-webhook-548949fc64-vkrlt.yaml\ncd+++++++++ objects/namespaced/cert-manager/rolebindings/\n>f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-cainjector:leaderelection.txt\n>f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-cainjector:leaderelection.yaml\n>f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-webhook:dynamic-serving.txt\n>f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-webhook:dynamic-serving.yaml\n>f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager:leaderelection.txt\n>f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager:leaderelection.yaml\ncd+++++++++ objects/namespaced/cert-manager/roles/\n>f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-cainjector:leaderelection.txt\n>f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-cainjector:leaderelection.yaml\n>f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-webhook:dynamic-serving.txt\n>f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-webhook:dynamic-serving.yaml\n>f+++++++++ objects/namespaced/cert-manager/roles/cert-manager:leaderelection.txt\n>f+++++++++ objects/namespaced/cert-manager/roles/cert-manager:leaderelection.yaml\ncd+++++++++ objects/namespaced/cert-manager/secrets/\n>f+++++++++ 
objects/namespaced/cert-manager/secrets/cert-manager-selfsigned-ca.txt\n>f+++++++++ objects/namespaced/cert-manager/secrets/cert-manager-selfsigned-ca.yaml\n>f+++++++++ objects/namespaced/cert-manager/secrets/cert-manager-webhook-ca.txt\n>f+++++++++ objects/namespaced/cert-manager/secrets/cert-manager-webhook-ca.yaml\n>f+++++++++ objects/namespaced/cert-manager/secrets/kube-prometheus-stack-ca.txt\n>f+++++++++ objects/namespaced/cert-manager/secrets/kube-prometheus-stack-ca.yaml\n>f+++++++++ objects/namespaced/cert-manager/secrets/sh.helm.release.v1.cert-manager.v1.txt\n>f+++++++++ objects/namespaced/cert-manager/secrets/sh.helm.release.v1.cert-manager.v1.yaml\ncd+++++++++ objects/namespaced/cert-manager/serviceaccounts/\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-cainjector.txt\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-cainjector.yaml\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-webhook.txt\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-webhook.yaml\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager.txt\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager.yaml\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/cert-manager/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/cert-manager/services/\n>f+++++++++ objects/namespaced/cert-manager/services/cert-manager-webhook.txt\n>f+++++++++ objects/namespaced/cert-manager/services/cert-manager-webhook.yaml\n>f+++++++++ objects/namespaced/cert-manager/services/cert-manager.txt\n>f+++++++++ objects/namespaced/cert-manager/services/cert-manager.yaml\ncd+++++++++ objects/namespaced/default/\ncd+++++++++ objects/namespaced/default/configmaps/\n>f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ 
objects/namespaced/default/endpoints/\n>f+++++++++ objects/namespaced/default/endpoints/kubernetes.txt\n>f+++++++++ objects/namespaced/default/endpoints/kubernetes.yaml\ncd+++++++++ objects/namespaced/default/serviceaccounts/\n>f+++++++++ objects/namespaced/default/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/default/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/default/services/\n>f+++++++++ objects/namespaced/default/services/kubernetes.txt\n>f+++++++++ objects/namespaced/default/services/kubernetes.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/\ncd+++++++++ objects/namespaced/ingress-nginx/configmaps/\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-controller.txt\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-controller.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-tcp.txt\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-tcp.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-udp.txt\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-udp.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/ingress-nginx/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/daemonsets/\n>f+++++++++ objects/namespaced/ingress-nginx/daemonsets/ingress-nginx-controller.txt\n>f+++++++++ objects/namespaced/ingress-nginx/daemonsets/ingress-nginx-controller.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/deployment/\n>f+++++++++ objects/namespaced/ingress-nginx/deployment/ingress-nginx-defaultbackend.txt\n>f+++++++++ objects/namespaced/ingress-nginx/deployment/ingress-nginx-defaultbackend.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/endpoints/\n>f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-admission.txt\n>f+++++++++ 
objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-admission.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-metrics.txt\n>f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-metrics.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller.txt\n>f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-defaultbackend.txt\n>f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-defaultbackend.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/pods/\n>f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-controller-j4bqv.txt\n>f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-controller-j4bqv.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-defaultbackend-6987ff55cf-gpx4l.txt\n>f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-defaultbackend-6987ff55cf-gpx4l.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/rolebindings/\n>f+++++++++ objects/namespaced/ingress-nginx/rolebindings/ingress-nginx.txt\n>f+++++++++ objects/namespaced/ingress-nginx/rolebindings/ingress-nginx.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/roles/\n>f+++++++++ objects/namespaced/ingress-nginx/roles/ingress-nginx.txt\n>f+++++++++ objects/namespaced/ingress-nginx/roles/ingress-nginx.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/secrets/\n>f+++++++++ objects/namespaced/ingress-nginx/secrets/ingress-nginx-admission.txt\n>f+++++++++ objects/namespaced/ingress-nginx/secrets/ingress-nginx-admission.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/secrets/sh.helm.release.v1.ingress-nginx.v1.txt\n>f+++++++++ objects/namespaced/ingress-nginx/secrets/sh.helm.release.v1.ingress-nginx.v1.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/\n>f+++++++++ 
objects/namespaced/ingress-nginx/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx-backend.txt\n>f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx-backend.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx.txt\n>f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx.yaml\ncd+++++++++ objects/namespaced/ingress-nginx/services/\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-admission.txt\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-admission.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-metrics.txt\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-metrics.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller.txt\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller.yaml\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-defaultbackend.txt\n>f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-defaultbackend.yaml\ncd+++++++++ objects/namespaced/kube-node-lease/\ncd+++++++++ objects/namespaced/kube-node-lease/configmaps/\n>f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/\n>f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/kube-public/\ncd+++++++++ objects/namespaced/kube-public/configmaps/\n>f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.txt\n>f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.yaml\n>f+++++++++ 
objects/namespaced/kube-public/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/kube-public/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/kube-public/rolebindings/\n>f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.txt\n>f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.yaml\n>f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.yaml\ncd+++++++++ objects/namespaced/kube-public/roles/\n>f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.txt\n>f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.yaml\n>f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.yaml\ncd+++++++++ objects/namespaced/kube-public/serviceaccounts/\n>f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/kube-system/\ncd+++++++++ objects/namespaced/kube-system/configmaps/\n>f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/coredns.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/coredns.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.txt\n>f+++++++++ 
objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.yaml\n>f+++++++++ objects/namespaced/kube-system/configmaps/kubelet-config.txt\n>f+++++++++ objects/namespaced/kube-system/configmaps/kubelet-config.yaml\ncd+++++++++ objects/namespaced/kube-system/daemonsets/\n>f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.txt\n>f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.yaml\n>f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.yaml\ncd+++++++++ objects/namespaced/kube-system/deployment/\n>f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.txt\n>f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.yaml\n>f+++++++++ objects/namespaced/kube-system/deployment/coredns.txt\n>f+++++++++ objects/namespaced/kube-system/deployment/coredns.yaml\ncd+++++++++ objects/namespaced/kube-system/endpoints/\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.yaml\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-coredns.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-coredns.yaml\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-controller-manager.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-controller-manager.yaml\n>f+++++++++ 
objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-etcd.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-etcd.yaml\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-scheduler.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-scheduler.yaml\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kubelet.txt\n>f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kubelet.yaml\ncd+++++++++ objects/namespaced/kube-system/pods/\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-869df985b8-kszk2.txt\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-869df985b8-kszk2.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-vdz4f.txt\n>f+++++++++ objects/namespaced/kube-system/pods/cilium-vdz4f.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-6f2mm.txt\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-6f2mm.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-j6fp4.txt\n>f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-j6fp4.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-proxy-sp2vs.txt\n>f+++++++++ 
objects/namespaced/kube-system/pods/kube-proxy-sp2vs.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.yaml\n>f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.txt\n>f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.yaml\ncd+++++++++ objects/namespaced/kube-system/rolebindings/\n>f+++++++++ objects/namespaced/kube-system/rolebindings/cilium-config-agent.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/cilium-config-agent.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.txt\n>f+++++++++ 
objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.yaml\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.txt\n>f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.yaml\ncd+++++++++ objects/namespaced/kube-system/roles/\n>f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.txt\n>f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.txt\n>f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.txt\n>f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.txt\n>f+++++++++ 
objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.yaml\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:token-cleaner.txt\n>f+++++++++ objects/namespaced/kube-system/roles/system:controller:token-cleaner.yaml\ncd+++++++++ objects/namespaced/kube-system/secrets/\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-592olp.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-592olp.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-vahv4y.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-vahv4y.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.yaml\n>f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.txt\n>f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.yaml\ncd+++++++++ objects/namespaced/kube-system/serviceaccounts/\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/certificate-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/certificate-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium.txt\n>f+++++++++ 
objects/namespaced/kube-system/serviceaccounts/cilium.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.yaml\n>f+++++++++ 
objects/namespaced/kube-system/serviceaccounts/expand-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/expand-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/replicaset-controller.txt\n>f+++++++++ 
objects/namespaced/kube-system/serviceaccounts/replicaset-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.yaml\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-controller.txt\n>f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-controller.yaml\ncd+++++++++ objects/namespaced/kube-system/services/\n>f+++++++++ objects/namespaced/kube-system/services/kube-dns.txt\n>f+++++++++ objects/namespaced/kube-system/services/kube-dns.yaml\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-coredns.txt\n>f+++++++++ 
objects/namespaced/kube-system/services/kube-prometheus-stack-coredns.yaml\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-controller-manager.txt\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-controller-manager.yaml\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-etcd.txt\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-etcd.yaml\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-proxy.txt\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-proxy.yaml\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-scheduler.txt\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-scheduler.yaml\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kubelet.txt\n>f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kubelet.yaml\ncd+++++++++ objects/namespaced/local-path-storage/\ncd+++++++++ objects/namespaced/local-path-storage/configmaps/\n>f+++++++++ objects/namespaced/local-path-storage/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/local-path-storage/configmaps/kube-root-ca.crt.yaml\n>f+++++++++ objects/namespaced/local-path-storage/configmaps/local-path-config.txt\n>f+++++++++ objects/namespaced/local-path-storage/configmaps/local-path-config.yaml\ncd+++++++++ objects/namespaced/local-path-storage/deployment/\n>f+++++++++ objects/namespaced/local-path-storage/deployment/local-path-provisioner.txt\n>f+++++++++ objects/namespaced/local-path-storage/deployment/local-path-provisioner.yaml\ncd+++++++++ objects/namespaced/local-path-storage/pods/\n>f+++++++++ objects/namespaced/local-path-storage/pods/local-path-provisioner-679c578f5-7h8w5.txt\n>f+++++++++ objects/namespaced/local-path-storage/pods/local-path-provisioner-679c578f5-7h8w5.yaml\ncd+++++++++ 
objects/namespaced/local-path-storage/secrets/\n>f+++++++++ objects/namespaced/local-path-storage/secrets/sh.helm.release.v1.local-path-provisioner.v1.txt\n>f+++++++++ objects/namespaced/local-path-storage/secrets/sh.helm.release.v1.local-path-provisioner.v1.yaml\ncd+++++++++ objects/namespaced/local-path-storage/serviceaccounts/\n>f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/local-path-provisioner.txt\n>f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/local-path-provisioner.yaml\ncd+++++++++ objects/namespaced/magnum-system/\ncd+++++++++ objects/namespaced/magnum-system/configmaps/\n>f+++++++++ objects/namespaced/magnum-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/magnum-system/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/magnum-system/serviceaccounts/\n>f+++++++++ objects/namespaced/magnum-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/magnum-system/serviceaccounts/default.yaml\ncd+++++++++ objects/namespaced/monitoring/\ncd+++++++++ objects/namespaced/monitoring/configmaps/\n>f+++++++++ objects/namespaced/monitoring/configmaps/goldpinger-zap.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/goldpinger-zap.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/ipmi-exporter.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/ipmi-exporter.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-alertmanager-overview.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-alertmanager-overview.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-apiserver.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-apiserver.yaml\n>f+++++++++ 
objects/namespaced/monitoring/configmaps/kube-prometheus-stack-cluster-total.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-cluster-total.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-controller-manager.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-controller-manager.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster-advanced.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster-advanced.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-goldpinger.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-goldpinger.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-haproxy.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-haproxy.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-host-details.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-host-details.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-hosts-overview.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-hosts-overview.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-node-exporter-full.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-node-exporter-full.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osd-device-details.txt\n>f+++++++++ 
objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osd-device-details.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osds-overview.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osds-overview.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-detail.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-detail.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-overview.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-overview.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-details.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-details.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-overview.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-overview.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-etcd.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-etcd.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-config-dashboards.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-config-dashboards.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-datasource.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-datasource.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-overview.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-overview.yaml\n>f+++++++++ 
objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-coredns.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-coredns.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-cluster.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-cluster.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-multicluster.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-multicluster.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-namespace.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-namespace.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-node.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-node.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-pod.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-pod.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workload.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workload.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workloads-namespace.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workloads-namespace.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-kubelet.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-kubelet.yaml\n>f+++++++++ 
objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-pod.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-pod.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-workload.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-workload.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-cluster-rsrc-use.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-cluster-rsrc-use.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-exporter.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-exporter.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-rsrc-use.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-rsrc-use.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes-darwin.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes-darwin.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-persistentvolumesusage.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-persistentvolumesusage.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-pod-total.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-pod-total.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus-tls.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus-tls.yaml\n>f+++++++++ 
objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-proxy.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-proxy.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-scheduler.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-scheduler.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-workload-total.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-workload-total.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/kube-root-ca.crt.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki-alerting-rules.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki-alerting-rules.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki-gateway.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki-gateway.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki-runtime.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki-runtime.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/loki.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-master-conf.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-master-conf.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-topology-updater-conf.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-topology-updater-conf.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-worker-conf.txt\n>f+++++++++ 
objects/namespaced/monitoring/configmaps/node-feature-discovery-worker-conf.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/prometheus-kube-prometheus-stack-prometheus-rulefiles-0.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/prometheus-kube-prometheus-stack-prometheus-rulefiles-0.yaml\n>f+++++++++ objects/namespaced/monitoring/configmaps/vector.txt\n>f+++++++++ objects/namespaced/monitoring/configmaps/vector.yaml\ncd+++++++++ objects/namespaced/monitoring/daemonsets/\n>f+++++++++ objects/namespaced/monitoring/daemonsets/goldpinger.txt\n>f+++++++++ objects/namespaced/monitoring/daemonsets/goldpinger.yaml\n>f+++++++++ objects/namespaced/monitoring/daemonsets/ipmi-exporter.txt\n>f+++++++++ objects/namespaced/monitoring/daemonsets/ipmi-exporter.yaml\n>f+++++++++ objects/namespaced/monitoring/daemonsets/kube-prometheus-stack-prometheus-node-exporter.txt\n>f+++++++++ objects/namespaced/monitoring/daemonsets/kube-prometheus-stack-prometheus-node-exporter.yaml\n>f+++++++++ objects/namespaced/monitoring/daemonsets/node-feature-discovery-worker.txt\n>f+++++++++ objects/namespaced/monitoring/daemonsets/node-feature-discovery-worker.yaml\n>f+++++++++ objects/namespaced/monitoring/daemonsets/vector.txt\n>f+++++++++ objects/namespaced/monitoring/daemonsets/vector.yaml\ncd+++++++++ objects/namespaced/monitoring/deployment/\n>f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-kube-state-metrics.txt\n>f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-kube-state-metrics.yaml\n>f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-operator.txt\n>f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-operator.yaml\n>f+++++++++ objects/namespaced/monitoring/deployment/loki-gateway.txt\n>f+++++++++ 
objects/namespaced/monitoring/deployment/loki-gateway.yaml\n>f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-gc.txt\n>f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-gc.yaml\n>f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-master.txt\n>f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-master.yaml\n>f+++++++++ objects/namespaced/monitoring/deployment/prometheus-pushgateway.txt\n>f+++++++++ objects/namespaced/monitoring/deployment/prometheus-pushgateway.yaml\ncd+++++++++ objects/namespaced/monitoring/endpoints/\n>f+++++++++ objects/namespaced/monitoring/endpoints/alertmanager-operated.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/alertmanager-operated.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/goldpinger.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/goldpinger.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-alertmanager.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-alertmanager.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-kube-state-metrics.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-kube-state-metrics.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-operator.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-operator.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus-node-exporter.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus-node-exporter.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus.txt\n>f+++++++++ 
objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-chunks-cache.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-chunks-cache.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-gateway.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-gateway.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-headless.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-headless.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-memberlist.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-memberlist.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-results-cache.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki-results-cache.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/loki.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-operated.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-operated.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-pushgateway.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-pushgateway.yaml\n>f+++++++++ objects/namespaced/monitoring/endpoints/vector-headless.txt\n>f+++++++++ objects/namespaced/monitoring/endpoints/vector-headless.yaml\ncd+++++++++ objects/namespaced/monitoring/ingresses/\n>f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-alertmanager.txt\n>f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-alertmanager.yaml\n>f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-prometheus.txt\n>f+++++++++ 
objects/namespaced/monitoring/ingresses/kube-prometheus-stack-prometheus.yaml\ncd+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/\n>f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/alertmanager-kube-prometheus-stack-alertmanager-db-alertmanager-kube-prometheus-stack-alertmanager-0.txt\n>f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/alertmanager-kube-prometheus-stack-alertmanager-db-alertmanager-kube-prometheus-stack-alertmanager-0.yaml\n>f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/prometheus-kube-prometheus-stack-prometheus-db-prometheus-kube-prometheus-stack-prometheus-0.txt\n>f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/prometheus-kube-prometheus-stack-prometheus-db-prometheus-kube-prometheus-stack-prometheus-0.yaml\n>f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/storage-loki-0.txt\n>f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/storage-loki-0.yaml\ncd+++++++++ objects/namespaced/monitoring/pods/\n>f+++++++++ objects/namespaced/monitoring/pods/alertmanager-kube-prometheus-stack-alertmanager-0.txt\n>f+++++++++ objects/namespaced/monitoring/pods/alertmanager-kube-prometheus-stack-alertmanager-0.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/goldpinger-7jzp8.txt\n>f+++++++++ objects/namespaced/monitoring/pods/goldpinger-7jzp8.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-grafana-668bfb9659-ft52b.txt\n>f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-grafana-668bfb9659-ft52b.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m.txt\n>f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-operator-cd88cf4bf-lzh7g.txt\n>f+++++++++ 
objects/namespaced/monitoring/pods/kube-prometheus-stack-operator-cd88cf4bf-lzh7g.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-prometheus-node-exporter-59qlm.txt\n>f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-prometheus-node-exporter-59qlm.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/loki-0.txt\n>f+++++++++ objects/namespaced/monitoring/pods/loki-0.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/loki-chunks-cache-0.txt\n>f+++++++++ objects/namespaced/monitoring/pods/loki-chunks-cache-0.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/loki-gateway-cf54cb88c-zv654.txt\n>f+++++++++ objects/namespaced/monitoring/pods/loki-gateway-cf54cb88c-zv654.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/loki-results-cache-0.txt\n>f+++++++++ objects/namespaced/monitoring/pods/loki-results-cache-0.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-gc-6675cbb6d9-zv9sn.txt\n>f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-gc-6675cbb6d9-zv9sn.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-master-8665476dbc-t4z5z.txt\n>f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-master-8665476dbc-t4z5z.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-worker-p8lmk.txt\n>f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-worker-p8lmk.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/prometheus-kube-prometheus-stack-prometheus-0.txt\n>f+++++++++ objects/namespaced/monitoring/pods/prometheus-kube-prometheus-stack-prometheus-0.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/prometheus-pushgateway-7b8659c68b-28dht.txt\n>f+++++++++ objects/namespaced/monitoring/pods/prometheus-pushgateway-7b8659c68b-28dht.yaml\n>f+++++++++ objects/namespaced/monitoring/pods/vector-qzjms.txt\n>f+++++++++ objects/namespaced/monitoring/pods/vector-qzjms.yaml\ncd+++++++++ 
objects/namespaced/monitoring/rolebindings/\n>f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-pod-tls-sidecar.txt\n>f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-pod-tls-sidecar.yaml\n>f+++++++++ objects/namespaced/monitoring/rolebindings/node-feature-discovery-worker.txt\n>f+++++++++ objects/namespaced/monitoring/rolebindings/node-feature-discovery-worker.yaml\ncd+++++++++ objects/namespaced/monitoring/roles/\n>f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-pod-tls-sidecar.txt\n>f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-pod-tls-sidecar.yaml\n>f+++++++++ objects/namespaced/monitoring/roles/node-feature-discovery-worker.txt\n>f+++++++++ objects/namespaced/monitoring/roles/node-feature-discovery-worker.yaml\ncd+++++++++ objects/namespaced/monitoring/secrets/\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-generated.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-generated.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-tls-assets-0.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-tls-assets-0.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-web-config.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-web-config.yaml\n>f+++++++++ 
objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-tls.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-tls.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/grafana-tls.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/grafana-tls.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-admission.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-admission.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-client-secret.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-client-secret.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-cookie-secret.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-cookie-secret.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-oauth2-proxy.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-oauth2-proxy.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-etcd-client-cert.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-etcd-client-cert.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana-client-secret.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana-client-secret.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-client-secret.txt\n>f+++++++++ 
objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-client-secret.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-cookie-secret.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-cookie-secret.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-node-exporter-59qlm-tls.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-node-exporter-59qlm-tls.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-oauth2-proxy.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-oauth2-proxy.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-0-tls.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-0-tls.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-tls-assets-0.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-tls-assets-0.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-web-config.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-web-config.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-tls.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/prometheus-tls.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.goldpinger.v1.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.goldpinger.v1.yaml\n>f+++++++++ 
objects/namespaced/monitoring/secrets/sh.helm.release.v1.kube-prometheus-stack.v1.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.kube-prometheus-stack.v1.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.loki.v1.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.loki.v1.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.node-feature-discovery.v1.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.node-feature-discovery.v1.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.prometheus-pushgateway.v1.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.prometheus-pushgateway.v1.yaml\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.vector.v1.txt\n>f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.vector.v1.yaml\ncd+++++++++ objects/namespaced/monitoring/serviceaccounts/\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/goldpinger.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/goldpinger.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-alertmanager.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-alertmanager.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-kube-state-metrics.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-kube-state-metrics.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-operator.txt\n>f+++++++++ 
objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-operator.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus-node-exporter.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus-node-exporter.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/loki.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/loki.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-gc.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-gc.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-worker.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-worker.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/prometheus-pushgateway.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/prometheus-pushgateway.yaml\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/vector.txt\n>f+++++++++ objects/namespaced/monitoring/serviceaccounts/vector.yaml\ncd+++++++++ objects/namespaced/monitoring/services/\n>f+++++++++ objects/namespaced/monitoring/services/alertmanager-operated.txt\n>f+++++++++ objects/namespaced/monitoring/services/alertmanager-operated.yaml\n>f+++++++++ objects/namespaced/monitoring/services/goldpinger.txt\n>f+++++++++ objects/namespaced/monitoring/services/goldpinger.yaml\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-alertmanager.txt\n>f+++++++++ 
objects/namespaced/monitoring/services/kube-prometheus-stack-alertmanager.yaml\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-grafana.txt\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-grafana.yaml\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-kube-state-metrics.txt\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-kube-state-metrics.yaml\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-operator.txt\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-operator.yaml\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus-node-exporter.txt\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus-node-exporter.yaml\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus.txt\n>f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus.yaml\n>f+++++++++ objects/namespaced/monitoring/services/loki-chunks-cache.txt\n>f+++++++++ objects/namespaced/monitoring/services/loki-chunks-cache.yaml\n>f+++++++++ objects/namespaced/monitoring/services/loki-gateway.txt\n>f+++++++++ objects/namespaced/monitoring/services/loki-gateway.yaml\n>f+++++++++ objects/namespaced/monitoring/services/loki-headless.txt\n>f+++++++++ objects/namespaced/monitoring/services/loki-headless.yaml\n>f+++++++++ objects/namespaced/monitoring/services/loki-memberlist.txt\n>f+++++++++ objects/namespaced/monitoring/services/loki-memberlist.yaml\n>f+++++++++ objects/namespaced/monitoring/services/loki-results-cache.txt\n>f+++++++++ objects/namespaced/monitoring/services/loki-results-cache.yaml\n>f+++++++++ objects/namespaced/monitoring/services/loki.txt\n>f+++++++++ objects/namespaced/monitoring/services/loki.yaml\n>f+++++++++ objects/namespaced/monitoring/services/prometheus-operated.txt\n>f+++++++++ 
objects/namespaced/monitoring/services/prometheus-operated.yaml\n>f+++++++++ objects/namespaced/monitoring/services/prometheus-pushgateway.txt\n>f+++++++++ objects/namespaced/monitoring/services/prometheus-pushgateway.yaml\n>f+++++++++ objects/namespaced/monitoring/services/vector-headless.txt\n>f+++++++++ objects/namespaced/monitoring/services/vector-headless.yaml\ncd+++++++++ objects/namespaced/monitoring/statefulsets/\n>f+++++++++ objects/namespaced/monitoring/statefulsets/alertmanager-kube-prometheus-stack-alertmanager.txt\n>f+++++++++ objects/namespaced/monitoring/statefulsets/alertmanager-kube-prometheus-stack-alertmanager.yaml\n>f+++++++++ objects/namespaced/monitoring/statefulsets/loki-chunks-cache.txt\n>f+++++++++ objects/namespaced/monitoring/statefulsets/loki-chunks-cache.yaml\n>f+++++++++ objects/namespaced/monitoring/statefulsets/loki-results-cache.txt\n>f+++++++++ objects/namespaced/monitoring/statefulsets/loki-results-cache.yaml\n>f+++++++++ objects/namespaced/monitoring/statefulsets/loki.txt\n>f+++++++++ objects/namespaced/monitoring/statefulsets/loki.yaml\n>f+++++++++ objects/namespaced/monitoring/statefulsets/prometheus-kube-prometheus-stack-prometheus.txt\n>f+++++++++ objects/namespaced/monitoring/statefulsets/prometheus-kube-prometheus-stack-prometheus.yaml\ncd+++++++++ objects/namespaced/openstack/\ncd+++++++++ objects/namespaced/openstack/configmaps/\n>f+++++++++ objects/namespaced/openstack/configmaps/barbican-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/barbican-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/ceph-csi-config.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/ceph-csi-config.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/ceph-etc.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/ceph-etc.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/cinder-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/cinder-bin.yaml\n>f+++++++++ 
objects/namespaced/openstack/configmaps/glance-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/glance-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/heat-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/heat-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/horizon-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/horizon-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/keepalived-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/keepalived-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/keystone-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/keystone-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/keystone-openid-metadata.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/keystone-openid-metadata.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/kube-root-ca.crt.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/libvirt-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/libvirt-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/libvirt-libvirt-default-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/libvirt-libvirt-default-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/magnum-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/magnum-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/magnum-cluster-api-proxy-config.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/magnum-cluster-api-proxy-config.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/manila-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/manila-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/memcached-memcached-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/memcached-memcached-bin.yaml\n>f+++++++++ 
objects/namespaced/openstack/configmaps/neutron-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/neutron-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/nova-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/nova-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/octavia-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/octavia-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/openvswitch-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/openvswitch-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/ovn-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/ovn-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-haproxy.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-haproxy.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-pxc.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-pxc.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/placement-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/placement-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-glance-plugins-conf.txt\n>f+++++++++ 
objects/namespaced/openstack/configmaps/rabbitmq-glance-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-glance-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-glance-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-server-conf.yaml\n>f+++++++++ 
objects/namespaced/openstack/configmaps/rabbitmq-nova-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-nova-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-nova-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-nova-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-plugins-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-plugins-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-server-conf.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-server-conf.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-mon-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-mon-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-pdbstatemap.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-pdbstatemap.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-rgw-ceph-mime-types.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-rgw-ceph-mime-types.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-config-override.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/rook-config-override.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/staffeln-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/staffeln-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/tempest-bin.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/tempest-bin.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/valkey-configuration.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/valkey-configuration.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/valkey-health.txt\n>f+++++++++ objects/namespaced/openstack/configmaps/valkey-health.yaml\n>f+++++++++ objects/namespaced/openstack/configmaps/valkey-scripts.txt\n>f+++++++++ 
objects/namespaced/openstack/configmaps/valkey-scripts.yaml\ncd+++++++++ objects/namespaced/openstack/cronjobs/\n>f+++++++++ objects/namespaced/openstack/cronjobs/cinder-volume-usage-audit.txt\n>f+++++++++ objects/namespaced/openstack/cronjobs/cinder-volume-usage-audit.yaml\n>f+++++++++ objects/namespaced/openstack/cronjobs/heat-engine-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/cronjobs/heat-engine-cleaner.yaml\n>f+++++++++ objects/namespaced/openstack/cronjobs/heat-purge-deleted.txt\n>f+++++++++ objects/namespaced/openstack/cronjobs/heat-purge-deleted.yaml\n>f+++++++++ objects/namespaced/openstack/cronjobs/keystone-credential-rotate.txt\n>f+++++++++ objects/namespaced/openstack/cronjobs/keystone-credential-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/cronjobs/keystone-fernet-rotate.txt\n>f+++++++++ objects/namespaced/openstack/cronjobs/keystone-fernet-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/cronjobs/nova-cell-setup.txt\n>f+++++++++ objects/namespaced/openstack/cronjobs/nova-cell-setup.yaml\n>f+++++++++ objects/namespaced/openstack/cronjobs/nova-service-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/cronjobs/nova-service-cleaner.yaml\ncd+++++++++ objects/namespaced/openstack/daemonsets/\n>f+++++++++ objects/namespaced/openstack/daemonsets/keepalived.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/keepalived.yaml\n>f+++++++++ objects/namespaced/openstack/daemonsets/libvirt-libvirt-default.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/libvirt-libvirt-default.yaml\n>f+++++++++ objects/namespaced/openstack/daemonsets/magnum-cluster-api-proxy.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/magnum-cluster-api-proxy.yaml\n>f+++++++++ objects/namespaced/openstack/daemonsets/neutron-netns-cleanup-cron-default.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/neutron-netns-cleanup-cron-default.yaml\n>f+++++++++ 
objects/namespaced/openstack/daemonsets/neutron-ovn-metadata-agent-default.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/neutron-ovn-metadata-agent-default.yaml\n>f+++++++++ objects/namespaced/openstack/daemonsets/nova-compute-default.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/nova-compute-default.yaml\n>f+++++++++ objects/namespaced/openstack/daemonsets/octavia-health-manager-default.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/octavia-health-manager-default.yaml\n>f+++++++++ objects/namespaced/openstack/daemonsets/openvswitch.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/openvswitch.yaml\n>f+++++++++ objects/namespaced/openstack/daemonsets/ovn-controller.txt\n>f+++++++++ objects/namespaced/openstack/daemonsets/ovn-controller.yaml\ncd+++++++++ objects/namespaced/openstack/deployment/\n>f+++++++++ objects/namespaced/openstack/deployment/barbican-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/barbican-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-backup.txt\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-backup.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-volume.txt\n>f+++++++++ objects/namespaced/openstack/deployment/cinder-volume.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/glance-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/glance-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/heat-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/heat-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/heat-cfn.txt\n>f+++++++++ objects/namespaced/openstack/deployment/heat-cfn.yaml\n>f+++++++++ 
objects/namespaced/openstack/deployment/heat-engine.txt\n>f+++++++++ objects/namespaced/openstack/deployment/heat-engine.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/horizon.txt\n>f+++++++++ objects/namespaced/openstack/deployment/horizon.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/keystone-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/keystone-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/magnum-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/magnum-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/magnum-registry.txt\n>f+++++++++ objects/namespaced/openstack/deployment/magnum-registry.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/manila-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/manila-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/manila-data.txt\n>f+++++++++ objects/namespaced/openstack/deployment/manila-data.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/manila-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/deployment/manila-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/manila-share.txt\n>f+++++++++ objects/namespaced/openstack/deployment/manila-share.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/memcached-memcached.txt\n>f+++++++++ objects/namespaced/openstack/deployment/memcached-memcached.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/deployment/neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/nova-api-metadata.txt\n>f+++++++++ objects/namespaced/openstack/deployment/nova-api-metadata.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/nova-api-osapi.txt\n>f+++++++++ objects/namespaced/openstack/deployment/nova-api-osapi.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/nova-conductor.txt\n>f+++++++++ 
objects/namespaced/openstack/deployment/nova-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/nova-novncproxy.txt\n>f+++++++++ objects/namespaced/openstack/deployment/nova-novncproxy.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/nova-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/deployment/nova-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/octavia-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/octavia-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/octavia-housekeeping.txt\n>f+++++++++ objects/namespaced/openstack/deployment/octavia-housekeeping.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/octavia-worker.txt\n>f+++++++++ objects/namespaced/openstack/deployment/octavia-worker.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/openstack-database-exporter.txt\n>f+++++++++ objects/namespaced/openstack/deployment/openstack-database-exporter.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/openstack-exporter.txt\n>f+++++++++ objects/namespaced/openstack/deployment/openstack-exporter.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/ovn-northd.txt\n>f+++++++++ objects/namespaced/openstack/deployment/ovn-northd.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/placement-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/placement-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/pxc-operator.txt\n>f+++++++++ objects/namespaced/openstack/deployment/pxc-operator.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-cluster-operator.txt\n>f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-cluster-operator.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-messaging-topology-operator.txt\n>f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-messaging-topology-operator.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/rook-ceph-crashcollector-instance.txt\n>f+++++++++ 
objects/namespaced/openstack/deployment/rook-ceph-crashcollector-instance.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/rook-ceph-rgw-ceph-a.txt\n>f+++++++++ objects/namespaced/openstack/deployment/rook-ceph-rgw-ceph-a.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/staffeln-api.txt\n>f+++++++++ objects/namespaced/openstack/deployment/staffeln-api.yaml\n>f+++++++++ objects/namespaced/openstack/deployment/staffeln-conductor.txt\n>f+++++++++ objects/namespaced/openstack/deployment/staffeln-conductor.yaml\ncd+++++++++ objects/namespaced/openstack/endpoints/\n>f+++++++++ objects/namespaced/openstack/endpoints/barbican-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/barbican-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/ceph-mon.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/ceph-mon.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/cinder-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/cinder-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/glance-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/glance-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/heat-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/heat-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/heat-cfn.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/heat-cfn.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/horizon-int.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/horizon-int.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/horizon.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/horizon.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/keystone-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/keystone-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/magnum-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/magnum-api.yaml\n>f+++++++++ 
objects/namespaced/openstack/endpoints/magnum-registry.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/magnum-registry.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/manila-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/manila-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/memcached-metrics.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/memcached-metrics.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/memcached.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/memcached.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/nova-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/nova-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/nova-metadata.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/nova-metadata.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/nova-novncproxy.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/nova-novncproxy.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/octavia-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/octavia-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/openstack-exporter.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/openstack-exporter.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-nb.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-nb.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-sb.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-sb.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-cluster-operator.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-cluster-operator.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-metrics.txt\n>f+++++++++ 
objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-metrics.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-replicas.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-replicas.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc-unready.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc-unready.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/placement-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/placement-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-heat-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-heat-nodes.yaml\n>f+++++++++ 
objects/namespaced/openstack/endpoints/rabbitmq-heat.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-heat.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-messaging-topology-operator-webhook.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-messaging-topology-operator-webhook.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova-nodes.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-octavia-nodes.txt\n>f+++++++++ 
objects/namespaced/openstack/endpoints/rabbitmq-octavia-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-octavia.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-octavia.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/rook-ceph-rgw-ceph.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/rook-ceph-rgw-ceph.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/staffeln-api.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/staffeln-api.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/valkey-headless.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/valkey-headless.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/valkey-metrics.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/valkey-metrics.yaml\n>f+++++++++ objects/namespaced/openstack/endpoints/valkey.txt\n>f+++++++++ objects/namespaced/openstack/endpoints/valkey.yaml\ncd+++++++++ objects/namespaced/openstack/ingresses/\n>f+++++++++ objects/namespaced/openstack/ingresses/cloudformation.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/cloudformation.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/compute-novnc-proxy.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/compute-novnc-proxy.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/compute.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/compute.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/container-infra-registry.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/container-infra-registry.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/container-infra.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/container-infra.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/dashboard.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/dashboard.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/identity.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/identity.yaml\n>f+++++++++ 
objects/namespaced/openstack/ingresses/image.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/image.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/key-manager.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/key-manager.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/load-balancer.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/load-balancer.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/network.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/network.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/orchestration.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/orchestration.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/placement.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/placement.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/rook-ceph-cluster.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/rook-ceph-cluster.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/sharev2.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/sharev2.yaml\n>f+++++++++ objects/namespaced/openstack/ingresses/volumev3.txt\n>f+++++++++ objects/namespaced/openstack/ingresses/volumev3.yaml\ncd+++++++++ objects/namespaced/openstack/jobs/\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/jobs/barbican-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/barbican-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-backup-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-backup-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-create-internal-tenant.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-create-internal-tenant.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-volume-usage-audit-29540045.txt\n>f+++++++++ objects/namespaced/openstack/jobs/cinder-volume-usage-audit-29540045.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/glance-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-db-sync.txt\n>f+++++++++ 
objects/namespaced/openstack/jobs/glance-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/glance-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/glance-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/glance-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-metadefs-load.txt\n>f+++++++++ objects/namespaced/openstack/jobs/glance-metadefs-load.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/glance-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/glance-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/glance-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-domain-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-domain-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540055.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540055.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540060.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540060.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540065.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540065.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-ks-endpoints.txt\n>f+++++++++ 
objects/namespaced/openstack/jobs/heat-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/heat-trusts.txt\n>f+++++++++ objects/namespaced/openstack/jobs/heat-trusts.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/horizon-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/horizon-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/horizon-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/horizon-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-credential-setup.txt\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-credential-setup.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-domain-manage.txt\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-domain-manage.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-fernet-setup.txt\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-fernet-setup.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/keystone-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-db-init.txt\n>f+++++++++ 
objects/namespaced/openstack/jobs/magnum-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-domain-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-domain-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/magnum-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/manila-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/jobs/manila-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/manila-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/manila-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/manila-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/manila-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/manila-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/manila-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/manila-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/manila-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/manila-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/manila-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/manila-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/manila-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-db-init.yaml\n>f+++++++++ 
objects/namespaced/openstack/jobs/neutron-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/neutron-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-cell-setup-29540040.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-cell-setup-29540040.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/nova-service-cleaner-29540040.txt\n>f+++++++++ objects/namespaced/openstack/jobs/nova-service-cleaner-29540040.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-bootstrap.yaml\n>f+++++++++ 
objects/namespaced/openstack/jobs/octavia-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/octavia-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/placement-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/placement-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/placement-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/placement-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/placement-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/jobs/placement-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/placement-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/jobs/placement-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/placement-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/placement-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-init.txt\n>f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/jobs/tempest-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/jobs/tempest-ks-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/jobs/tempest-run-tests.txt\n>f+++++++++ objects/namespaced/openstack/jobs/tempest-run-tests.yaml\ncd+++++++++ objects/namespaced/openstack/networkpolicies/\n>f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-cluster-operator.txt\n>f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-cluster-operator.yaml\n>f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-messaging-topology-operator.txt\n>f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-messaging-topology-operator.yaml\n>f+++++++++ objects/namespaced/openstack/networkpolicies/valkey.txt\n>f+++++++++ objects/namespaced/openstack/networkpolicies/valkey.yaml\ncd+++++++++ objects/namespaced/openstack/persistentvolumeclaims/\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-nb-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-nb-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-sb-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-sb-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/datadir-percona-xtradb-pxc-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/datadir-percona-xtradb-pxc-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-barbican-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-barbican-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-cinder-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-cinder-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-glance-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-glance-server-0.yaml\n>f+++++++++ 
objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-heat-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-heat-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-keystone-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-keystone-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-magnum-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-magnum-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-manila-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-manila-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-neutron-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-neutron-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-nova-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-nova-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-octavia-server-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-octavia-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/valkey-data-valkey-node-0.txt\n>f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/valkey-data-valkey-node-0.yaml\ncd+++++++++ objects/namespaced/openstack/pods/\n>f+++++++++ objects/namespaced/openstack/pods/barbican-api-775987496d-z6jqv.txt\n>f+++++++++ objects/namespaced/openstack/pods/barbican-api-775987496d-z6jqv.yaml\n>f+++++++++ objects/namespaced/openstack/pods/barbican-db-init-nm8k6.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/barbican-db-init-nm8k6.yaml\n>f+++++++++ objects/namespaced/openstack/pods/barbican-db-sync-452x5.txt\n>f+++++++++ objects/namespaced/openstack/pods/barbican-db-sync-452x5.yaml\n>f+++++++++ objects/namespaced/openstack/pods/barbican-ks-endpoints-w2ffg.txt\n>f+++++++++ objects/namespaced/openstack/pods/barbican-ks-endpoints-w2ffg.yaml\n>f+++++++++ objects/namespaced/openstack/pods/barbican-ks-service-8pm7j.txt\n>f+++++++++ objects/namespaced/openstack/pods/barbican-ks-service-8pm7j.yaml\n>f+++++++++ objects/namespaced/openstack/pods/barbican-ks-user-fszfr.txt\n>f+++++++++ objects/namespaced/openstack/pods/barbican-ks-user-fszfr.yaml\n>f+++++++++ objects/namespaced/openstack/pods/barbican-rabbit-init-j5qmd.txt\n>f+++++++++ objects/namespaced/openstack/pods/barbican-rabbit-init-j5qmd.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-api-86d7694f66-j97gj.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-api-86d7694f66-j97gj.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-backup-dcfd7dfb7-sdwkc.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-backup-dcfd7dfb7-sdwkc.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-backup-storage-init-zmnkh.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-backup-storage-init-zmnkh.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-bootstrap-wng86.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-bootstrap-wng86.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-create-internal-tenant-6vgll.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-create-internal-tenant-6vgll.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-db-init-mzm5b.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-db-init-mzm5b.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-db-sync-mz6ls.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-db-sync-mz6ls.yaml\n>f+++++++++ 
objects/namespaced/openstack/pods/cinder-ks-endpoints-xv2tb.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-ks-endpoints-xv2tb.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-ks-service-dlcxz.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-ks-service-dlcxz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-ks-user-5bd5g.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-ks-user-5bd5g.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-rabbit-init-l4fpm.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-rabbit-init-l4fpm.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-scheduler-586f444995-p7grf.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-scheduler-586f444995-p7grf.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-storage-init-vt6br.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-storage-init-vt6br.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-volume-66dc847979-qgp4l.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-volume-66dc847979-qgp4l.yaml\n>f+++++++++ objects/namespaced/openstack/pods/cinder-volume-usage-audit-29540045-jbmvh.txt\n>f+++++++++ objects/namespaced/openstack/pods/cinder-volume-usage-audit-29540045-jbmvh.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-api-65d579bfc8-6x76l.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-api-65d579bfc8-6x76l.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-db-init-wbpff.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-db-init-wbpff.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-db-sync-gk84f.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-db-sync-gk84f.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-ks-endpoints-dq2cc.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-ks-endpoints-dq2cc.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-ks-service-5h6bw.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/glance-ks-service-5h6bw.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-ks-user-lcfxr.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-ks-user-lcfxr.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-metadefs-load-476tp.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-metadefs-load-476tp.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-rabbit-init-c6rjt.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-rabbit-init-c6rjt.yaml\n>f+++++++++ objects/namespaced/openstack/pods/glance-storage-init-hdcpc.txt\n>f+++++++++ objects/namespaced/openstack/pods/glance-storage-init-hdcpc.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-api-6d65f9477-kmbkl.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-api-6d65f9477-kmbkl.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-bootstrap-9dwg2.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-bootstrap-9dwg2.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-cfn-f44db7787-t8f7m.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-cfn-f44db7787-t8f7m.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-db-init-fk8qw.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-db-init-fk8qw.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-db-sync-cxmcb.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-db-sync-cxmcb.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-domain-ks-user-tq2c5.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-domain-ks-user-tq2c5.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-engine-64f8b77bfb-wngkr.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-engine-64f8b77bfb-wngkr.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540055-p9pq9.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540055-p9pq9.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540060-z4g95.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/heat-engine-cleaner-29540060-z4g95.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540065-rcjr2.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540065-rcjr2.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-ks-endpoints-wwzbz.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-ks-endpoints-wwzbz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-ks-service-8pxqz.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-ks-service-8pxqz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-ks-user-tfk98.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-ks-user-tfk98.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-rabbit-init-rbl9n.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-rabbit-init-rbl9n.yaml\n>f+++++++++ objects/namespaced/openstack/pods/heat-trusts-czrrv.txt\n>f+++++++++ objects/namespaced/openstack/pods/heat-trusts-czrrv.yaml\n>f+++++++++ objects/namespaced/openstack/pods/horizon-8cdd7b888-bvzvx.txt\n>f+++++++++ objects/namespaced/openstack/pods/horizon-8cdd7b888-bvzvx.yaml\n>f+++++++++ objects/namespaced/openstack/pods/horizon-db-init-s5pbw.txt\n>f+++++++++ objects/namespaced/openstack/pods/horizon-db-init-s5pbw.yaml\n>f+++++++++ objects/namespaced/openstack/pods/horizon-db-sync-bgr2g.txt\n>f+++++++++ objects/namespaced/openstack/pods/horizon-db-sync-bgr2g.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keepalived-7jdfz.txt\n>f+++++++++ objects/namespaced/openstack/pods/keepalived-7jdfz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-api-c4656754c-mqbxm.txt\n>f+++++++++ objects/namespaced/openstack/pods/keystone-api-c4656754c-mqbxm.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-bootstrap-mdtrx.txt\n>f+++++++++ objects/namespaced/openstack/pods/keystone-bootstrap-mdtrx.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-credential-setup-6xsvx.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/keystone-credential-setup-6xsvx.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-db-init-z5mwz.txt\n>f+++++++++ objects/namespaced/openstack/pods/keystone-db-init-z5mwz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-db-sync-zsq8z.txt\n>f+++++++++ objects/namespaced/openstack/pods/keystone-db-sync-zsq8z.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-domain-manage-v865d.txt\n>f+++++++++ objects/namespaced/openstack/pods/keystone-domain-manage-v865d.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-fernet-setup-5rfqs.txt\n>f+++++++++ objects/namespaced/openstack/pods/keystone-fernet-setup-5rfqs.yaml\n>f+++++++++ objects/namespaced/openstack/pods/keystone-rabbit-init-m44qz.txt\n>f+++++++++ objects/namespaced/openstack/pods/keystone-rabbit-init-m44qz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/libvirt-libvirt-default-6bgrg.txt\n>f+++++++++ objects/namespaced/openstack/pods/libvirt-libvirt-default-6bgrg.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-api-8549df7884-9b2zc.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-api-8549df7884-9b2zc.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-cluster-api-proxy-z2flh.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-cluster-api-proxy-z2flh.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-conductor-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-conductor-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-db-init-dshrc.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-db-init-dshrc.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-db-sync-8ttpk.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-db-sync-8ttpk.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-domain-ks-user-vp8f2.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-domain-ks-user-vp8f2.yaml\n>f+++++++++ 
objects/namespaced/openstack/pods/magnum-ks-endpoints-jvzvf.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-ks-endpoints-jvzvf.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-ks-service-vdn67.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-ks-service-vdn67.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-ks-user-4wvtj.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-ks-user-4wvtj.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-rabbit-init-w7jc7.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-rabbit-init-w7jc7.yaml\n>f+++++++++ objects/namespaced/openstack/pods/magnum-registry-c45778976-2zz96.txt\n>f+++++++++ objects/namespaced/openstack/pods/magnum-registry-c45778976-2zz96.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-api-5cdf958bd9-hmbmb.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-api-5cdf958bd9-hmbmb.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-bootstrap-5wn97.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-bootstrap-5wn97.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-data-75cbc955bd-27jjw.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-data-75cbc955bd-27jjw.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-db-init-pbdm8.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-db-init-pbdm8.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-db-sync-rm9mz.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-db-sync-rm9mz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-ks-endpoints-d8nr9.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-ks-endpoints-d8nr9.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-ks-service-g7svt.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-ks-service-g7svt.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-ks-user-pr9mg.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-ks-user-pr9mg.yaml\n>f+++++++++ 
objects/namespaced/openstack/pods/manila-rabbit-init-74vjs.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-rabbit-init-74vjs.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-scheduler-5b584c8656-mmnnd.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-scheduler-5b584c8656-mmnnd.yaml\n>f+++++++++ objects/namespaced/openstack/pods/manila-share-68879775b-rc6q9.txt\n>f+++++++++ objects/namespaced/openstack/pods/manila-share-68879775b-rc6q9.yaml\n>f+++++++++ objects/namespaced/openstack/pods/memcached-memcached-6479589586-9sxjx.txt\n>f+++++++++ objects/namespaced/openstack/pods/memcached-memcached-6479589586-9sxjx.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-db-init-l7c9v.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-db-init-l7c9v.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-db-sync-brwb5.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-db-sync-brwb5.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ks-endpoints-dstkg.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ks-endpoints-dstkg.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ks-service-sq4tp.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ks-service-sq4tp.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ks-user-kcfc4.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ks-user-kcfc4.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-netns-cleanup-cron-default-8frwf.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-netns-cleanup-cron-default-8frwf.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ovn-metadata-agent-default-flhb5.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-ovn-metadata-agent-default-flhb5.yaml\n>f+++++++++ objects/namespaced/openstack/pods/neutron-rabbit-init-rdnbf.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-rabbit-init-rdnbf.yaml\n>f+++++++++ 
objects/namespaced/openstack/pods/neutron-server-649c5974f6-5dkvl.txt\n>f+++++++++ objects/namespaced/openstack/pods/neutron-server-649c5974f6-5dkvl.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-api-metadata-546d94ddd7-btnrc.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-api-metadata-546d94ddd7-btnrc.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-api-osapi-99c7b7cd8-2lnzr.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-api-osapi-99c7b7cd8-2lnzr.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-bootstrap-trzqq.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-bootstrap-trzqq.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-29540040-rtzd7.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-29540040-rtzd7.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-j97qh.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-j97qh.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-compute-default-2v5pd.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-compute-default-2v5pd.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-conductor-5474cb4b8d-bxzhq.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-conductor-5474cb4b8d-bxzhq.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-db-init-b4sqh.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-db-init-b4sqh.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-db-sync-2rbjc.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-db-sync-2rbjc.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-ks-endpoints-zwcm6.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-ks-endpoints-zwcm6.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-ks-service-fmj77.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-ks-service-fmj77.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-ks-user-t8xgz.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/nova-ks-user-t8xgz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-novncproxy-85dd5b5965-z6hmj.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-novncproxy-85dd5b5965-z6hmj.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-rabbit-init-szpvx.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-rabbit-init-szpvx.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-scheduler-78775555d4-hb2j9.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-scheduler-78775555d4-hb2j9.yaml\n>f+++++++++ objects/namespaced/openstack/pods/nova-service-cleaner-29540040-cxdd4.txt\n>f+++++++++ objects/namespaced/openstack/pods/nova-service-cleaner-29540040-cxdd4.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-api-75db6578cf-m656r.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-api-75db6578cf-m656r.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-bootstrap-kwfv2.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-bootstrap-kwfv2.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-db-init-wnz5h.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-db-init-wnz5h.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-db-sync-rjq45.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-db-sync-rjq45.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-health-manager-default-twmks.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-health-manager-default-twmks.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-housekeeping-87b98c47b-vqwct.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-housekeeping-87b98c47b-vqwct.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-ks-endpoints-jdlzw.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-ks-endpoints-jdlzw.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-ks-service-rkdp9.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/octavia-ks-service-rkdp9.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-ks-user-tjl52.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-ks-user-tjl52.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-rabbit-init-vdqxf.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-rabbit-init-vdqxf.yaml\n>f+++++++++ objects/namespaced/openstack/pods/octavia-worker-774cddbcdc-qxl6k.txt\n>f+++++++++ objects/namespaced/openstack/pods/octavia-worker-774cddbcdc-qxl6k.yaml\n>f+++++++++ objects/namespaced/openstack/pods/openstack-database-exporter-7c944bc9f-w2bdb.txt\n>f+++++++++ objects/namespaced/openstack/pods/openstack-database-exporter-7c944bc9f-w2bdb.yaml\n>f+++++++++ objects/namespaced/openstack/pods/openstack-exporter-74676fb4b4-jrkwh.txt\n>f+++++++++ objects/namespaced/openstack/pods/openstack-exporter-74676fb4b4-jrkwh.yaml\n>f+++++++++ objects/namespaced/openstack/pods/openvswitch-gj98d.txt\n>f+++++++++ objects/namespaced/openstack/pods/openvswitch-gj98d.yaml\n>f+++++++++ objects/namespaced/openstack/pods/ovn-controller-6mbd4.txt\n>f+++++++++ objects/namespaced/openstack/pods/ovn-controller-6mbd4.yaml\n>f+++++++++ objects/namespaced/openstack/pods/ovn-northd-6c6687ddd6-7grhs.txt\n>f+++++++++ objects/namespaced/openstack/pods/ovn-northd-6c6687ddd6-7grhs.yaml\n>f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-nb-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-nb-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-sb-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-sb-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-haproxy-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-haproxy-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-pxc-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-pxc-0.yaml\n>f+++++++++ 
objects/namespaced/openstack/pods/placement-api-75695696c6-brsxj.txt\n>f+++++++++ objects/namespaced/openstack/pods/placement-api-75695696c6-brsxj.yaml\n>f+++++++++ objects/namespaced/openstack/pods/placement-db-init-89t92.txt\n>f+++++++++ objects/namespaced/openstack/pods/placement-db-init-89t92.yaml\n>f+++++++++ objects/namespaced/openstack/pods/placement-db-sync-nvqjv.txt\n>f+++++++++ objects/namespaced/openstack/pods/placement-db-sync-nvqjv.yaml\n>f+++++++++ objects/namespaced/openstack/pods/placement-ks-endpoints-jmfl7.txt\n>f+++++++++ objects/namespaced/openstack/pods/placement-ks-endpoints-jmfl7.yaml\n>f+++++++++ objects/namespaced/openstack/pods/placement-ks-service-qdjdz.txt\n>f+++++++++ objects/namespaced/openstack/pods/placement-ks-service-qdjdz.yaml\n>f+++++++++ objects/namespaced/openstack/pods/placement-ks-user-blkn9.txt\n>f+++++++++ objects/namespaced/openstack/pods/placement-ks-user-blkn9.yaml\n>f+++++++++ objects/namespaced/openstack/pods/pxc-operator-7cff949c8b-7zp4j.txt\n>f+++++++++ objects/namespaced/openstack/pods/pxc-operator-7cff949c8b-7zp4j.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-barbican-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-barbican-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cinder-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cinder-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cluster-operator-5448d56d95-vk9km.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cluster-operator-5448d56d95-vk9km.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-glance-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-glance-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-heat-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-heat-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-keystone-server-0.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/rabbitmq-keystone-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-magnum-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-magnum-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-manila-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-manila-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-messaging-topology-operator-7f8596f788-84l9x.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-messaging-topology-operator-7f8596f788-84l9x.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-neutron-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-neutron-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-nova-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-nova-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-octavia-server-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/rabbitmq-octavia-server-0.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rook-ceph-crashcollector-instance-754c646bfd-htxl9.txt\n>f+++++++++ objects/namespaced/openstack/pods/rook-ceph-crashcollector-instance-754c646bfd-htxl9.yaml\n>f+++++++++ objects/namespaced/openstack/pods/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw.txt\n>f+++++++++ objects/namespaced/openstack/pods/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw.yaml\n>f+++++++++ objects/namespaced/openstack/pods/staffeln-api-6669c8779f-qgp4c.txt\n>f+++++++++ objects/namespaced/openstack/pods/staffeln-api-6669c8779f-qgp4c.yaml\n>f+++++++++ objects/namespaced/openstack/pods/staffeln-conductor-7b5d99bcd4-ws4sl.txt\n>f+++++++++ objects/namespaced/openstack/pods/staffeln-conductor-7b5d99bcd4-ws4sl.yaml\n>f+++++++++ objects/namespaced/openstack/pods/staffeln-db-init-p4pq4.txt\n>f+++++++++ objects/namespaced/openstack/pods/staffeln-db-init-p4pq4.yaml\n>f+++++++++ objects/namespaced/openstack/pods/staffeln-db-sync-khzx8.txt\n>f+++++++++ 
objects/namespaced/openstack/pods/staffeln-db-sync-khzx8.yaml\n>f+++++++++ objects/namespaced/openstack/pods/tempest-ks-user-kwbf6.txt\n>f+++++++++ objects/namespaced/openstack/pods/tempest-ks-user-kwbf6.yaml\n>f+++++++++ objects/namespaced/openstack/pods/tempest-run-tests-g5plh.txt\n>f+++++++++ objects/namespaced/openstack/pods/tempest-run-tests-g5plh.yaml\n>f+++++++++ objects/namespaced/openstack/pods/valkey-node-0.txt\n>f+++++++++ objects/namespaced/openstack/pods/valkey-node-0.yaml\ncd+++++++++ objects/namespaced/openstack/rolebindings/\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-backup-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-backup-storage-init.yaml\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/cinder-cinder-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-backup.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-backup.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-create-internal-tenant.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-create-internal-tenant.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-test.txt\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/cinder-cinder-test.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume-usage-audit.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume-usage-audit.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/cinder-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-metadefs-load.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-metadefs-load.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-rabbit-init.yaml\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/glance-glance-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-test.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-test.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/glance-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-cfn.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-cfn.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine-cleaner.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-user-domain.txt\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/heat-heat-ks-user-domain.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-purge-deleted.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-purge-deleted.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-test.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-test.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-trusts.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-trusts.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-test.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-test.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keepalived.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keepalived.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-credential-rotate.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-credential-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-credential-setup.txt\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/keystone-credential-setup.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-rotate.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-setup.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-setup.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-credential-rotate.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-credential-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-domain-manage.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-domain-manage.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-fernet-rotate.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-fernet-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/libvirt-cert-manager.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/libvirt-cert-manager.yaml\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/libvirt-libvirt.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/libvirt-libvirt.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-conductor.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user-domain.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user-domain.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-data.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-data.yaml\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/manila-manila-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-share.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-share.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-service.txt\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/neutron-neutron-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ovn-metadata-agent.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ovn-metadata-agent.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-test.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-test.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-metadata.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-metadata.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-osapi.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-osapi.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup-cron.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup-cron.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-compute.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-compute.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-conductor.txt\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/nova-nova-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-novncproxy.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-novncproxy.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-service-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-service-cleaner.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-test.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-test.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-db-init.yaml\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/octavia-octavia-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-health-manager.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-health-manager.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-housekeeping.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-housekeeping.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-worker.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-worker.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-controller.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-controller.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-northd.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-northd.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-controller.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-controller.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-northd.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-northd.yaml\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/ovn-ovsdb.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovsdb.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/pxc-operator.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/pxc-operator.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-barbican-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-barbican-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cinder-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cinder-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cluster-operator.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cluster-operator.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-glance-server.txt\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/rabbitmq-glance-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-heat-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-heat-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-keystone-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-keystone-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-magnum-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-magnum-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-manila-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-manila-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-messaging-topology-operator.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-messaging-topology-operator.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-nova-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-nova-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-octavia-server.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-octavia-server.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cluster-mgmt.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cluster-mgmt.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cmd-reporter.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cmd-reporter.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-mgr.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-mgr.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-osd.txt\n>f+++++++++ 
objects/namespaced/openstack/rolebindings/rook-ceph-osd.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-purge-osd.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-purge-osd.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-conductor-leases.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-conductor-leases.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-api.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-api.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-conductor.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-init.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-run-tests.txt\n>f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-run-tests.yaml\ncd+++++++++ objects/namespaced/openstack/roles/\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-sync.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-backup-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-backup-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-backup.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-backup.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-create-internal-tenant.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-create-internal-tenant.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-sync.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-test.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-test.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume-usage-audit.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume-usage-audit.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume.yaml\n>f+++++++++ objects/namespaced/openstack/roles/cinder-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/cinder-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-db-sync.txt\n>f+++++++++ 
objects/namespaced/openstack/roles/glance-openstack-glance-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-metadefs-load.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-metadefs-load.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-test.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-test.yaml\n>f+++++++++ objects/namespaced/openstack/roles/glance-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/glance-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-cfn.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-cfn.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/heat-openstack-heat-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine-cleaner.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user-domain.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user-domain.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-purge-deleted.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-purge-deleted.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-test.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-test.yaml\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-trusts.txt\n>f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-trusts.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/horizon-openstack-horizon-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-test.txt\n>f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-test.yaml\n>f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon.txt\n>f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keepalived.txt\n>f+++++++++ objects/namespaced/openstack/roles/keepalived.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-credential-rotate.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-credential-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-credential-setup.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-credential-setup.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-rotate.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-setup.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-setup.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-credential-rotate.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-credential-rotate.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/keystone-openstack-keystone-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-domain-manage.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-domain-manage.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-fernet-rotate.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-fernet-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/roles/libvirt-cert-manager.txt\n>f+++++++++ objects/namespaced/openstack/roles/libvirt-cert-manager.yaml\n>f+++++++++ objects/namespaced/openstack/roles/libvirt-openstack-libvirt.txt\n>f+++++++++ objects/namespaced/openstack/roles/libvirt-openstack-libvirt.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-conductor.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-endpoints.txt\n>f+++++++++ 
objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user-domain.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user-domain.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-data.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-data.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/manila-openstack-manila-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-share.txt\n>f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-share.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ovn-metadata-agent.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ovn-metadata-agent.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-server.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/neutron-openstack-neutron-test.txt\n>f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-test.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-metadata.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-metadata.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-osapi.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-osapi.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup-cron.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup-cron.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-compute.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-compute.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-conductor.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-service.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/nova-openstack-nova-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-novncproxy.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-novncproxy.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-service-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-service-cleaner.yaml\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-test.txt\n>f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-test.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-health-manager.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-health-manager.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-housekeeping.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-housekeeping.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-endpoints.txt\n>f+++++++++ 
objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-worker.txt\n>f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-worker.yaml\n>f+++++++++ objects/namespaced/openstack/roles/ovn-controller.txt\n>f+++++++++ objects/namespaced/openstack/roles/ovn-controller.yaml\n>f+++++++++ objects/namespaced/openstack/roles/ovn-northd.txt\n>f+++++++++ objects/namespaced/openstack/roles/ovn-northd.yaml\n>f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-controller.txt\n>f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-controller.yaml\n>f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-northd.txt\n>f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-northd.yaml\n>f+++++++++ objects/namespaced/openstack/roles/ovn-ovsdb.txt\n>f+++++++++ objects/namespaced/openstack/roles/ovn-ovsdb.yaml\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-db-init.txt\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-db-sync.txt\n>f+++++++++ 
objects/namespaced/openstack/roles/placement-openstack-placement-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/pxc-operator.txt\n>f+++++++++ objects/namespaced/openstack/roles/pxc-operator.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-barbican-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-barbican-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cinder-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cinder-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cluster-operator.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cluster-operator.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-glance-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-glance-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-heat-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-heat-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-keystone-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-keystone-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-magnum-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-magnum-peer-discovery.yaml\n>f+++++++++ 
objects/namespaced/openstack/roles/rabbitmq-manila-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-manila-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-messaging-topology-operator.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-messaging-topology-operator.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-neutron-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-neutron-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-nova-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-nova-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-octavia-peer-discovery.txt\n>f+++++++++ objects/namespaced/openstack/roles/rabbitmq-octavia-peer-discovery.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-cmd-reporter.txt\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-cmd-reporter.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-mgr.txt\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-mgr.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-osd.txt\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-osd.yaml\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-purge-osd.txt\n>f+++++++++ objects/namespaced/openstack/roles/rook-ceph-purge-osd.yaml\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-conductor.txt\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-api.txt\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-api.yaml\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-conductor.txt\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-init.txt\n>f+++++++++ 
objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-run-tests.txt\n>f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-run-tests.yaml\ncd+++++++++ objects/namespaced/openstack/secrets/\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-api-certs.txt\n>f+++++++++ 
objects/namespaced/openstack/secrets/cinder-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-backup-rbd-keyring.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-backup-rbd-keyring.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-volume-rbd-keyring.txt\n>f+++++++++ objects/namespaced/openstack/secrets/cinder-volume-rbd-keyring.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-db-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/glance-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-cfn-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-cfn-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-stack-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-stack-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-test.txt\n>f+++++++++ 
objects/namespaced/openstack/secrets/heat-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-trustee.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-trustee.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-int-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-int-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/horizon-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/images-rbd-keyring.txt\n>f+++++++++ objects/namespaced/openstack/secrets/images-rbd-keyring.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/internal-percona-xtradb.txt\n>f+++++++++ objects/namespaced/openstack/secrets/internal-percona-xtradb.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keepalived-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keepalived-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-api-certs.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/keystone-credential-keys.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-credential-keys.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-fernet-keys.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-fernet-keys.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-api-ca.txt\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-api-ca.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-api.txt\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-api.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-vnc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-vnc.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/libvirt-libvirt-default-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-vnc-ca.txt\n>f+++++++++ objects/namespaced/openstack/secrets/libvirt-vnc-ca.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-stack-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-stack-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-registry-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/magnum-registry-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-db-admin.txt\n>f+++++++++ 
objects/namespaced/openstack/secrets/manila-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/manila-ssh-keys.txt\n>f+++++++++ objects/namespaced/openstack/secrets/manila-ssh-keys.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/neutron-netns-cleanup-cron-default.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-netns-cleanup-cron-default.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-ovn-metadata-agent-default.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-ovn-metadata-agent-default.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-server-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/neutron-server-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-compute-default.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-compute-default.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-db-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/nova-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-vencrypt.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-vencrypt.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/nova-ssh.txt\n>f+++++++++ objects/namespaced/openstack/secrets/nova-ssh.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-amphora-ssh-key.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-amphora-ssh-key.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-client-ca.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-client-ca.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-client-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-client-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-db-admin.txt\n>f+++++++++ 
objects/namespaced/openstack/secrets/octavia-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-health-manager-default.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-health-manager-default.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-server-ca.txt\n>f+++++++++ objects/namespaced/openstack/secrets/octavia-server-ca.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/openstack-database-exporter-dsn.txt\n>f+++++++++ objects/namespaced/openstack/secrets/openstack-database-exporter-dsn.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/ovn-etc.txt\n>f+++++++++ 
objects/namespaced/openstack/secrets/ovn-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/ovn-vector-config.txt\n>f+++++++++ objects/namespaced/openstack/secrets/ovn-vector-config.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/percona-xtradb.txt\n>f+++++++++ objects/namespaced/openstack/secrets/percona-xtradb.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/placement-api-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/placement-api-certs.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/placement-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/placement-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/placement-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/placement-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/placement-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/placement-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/pvc-ceph-client-key.txt\n>f+++++++++ objects/namespaced/openstack/secrets/pvc-ceph-client-key.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-cinder-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-cinder-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-cinder-erlang-cookie.txt\n>f+++++++++ 
objects/namespaced/openstack/secrets/rabbitmq-cinder-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-messaging-topology-operator-webhook.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-messaging-topology-operator-webhook.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/rabbitmq-neutron-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-neutron-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-neutron-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-neutron-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-default-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-default-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-erlang-cookie.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-erlang-cookie.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-config.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-config.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-crash-collector-keyring.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-crash-collector-keyring.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-mon.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-mon.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-object-user-ceph-cosi.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-object-user-ceph-cosi.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-a-keyring.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-a-keyring.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-certs.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/rook-csi-cephfs-node.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-csi-cephfs-node.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-csi-cephfs-provisioner.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-csi-cephfs-provisioner.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-node.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-node.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-provisioner.txt\n>f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-provisioner.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.barbican.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.barbican.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph-provisioners.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph-provisioners.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.cinder.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.cinder.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.glance.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.glance.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.heat.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.heat.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.horizon.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.horizon.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.keystone.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.keystone.v1.yaml\n>f+++++++++ 
objects/namespaced/openstack/secrets/sh.helm.release.v1.libvirt.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.libvirt.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.magnum.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.magnum.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.manila.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.manila.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.memcached.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.memcached.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.neutron.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.neutron.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.nova.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.nova.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.octavia.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.octavia.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.openvswitch.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.openvswitch.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ovn.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ovn.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.placement.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.placement.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.pxc-operator.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.pxc-operator.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.rabbitmq-cluster-operator.v1.txt\n>f+++++++++ 
objects/namespaced/openstack/secrets/sh.helm.release.v1.rabbitmq-cluster-operator.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.staffeln.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.staffeln.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.tempest.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.tempest.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.valkey.v1.txt\n>f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.valkey.v1.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/staffeln-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/staffeln-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/tempest-etc.txt\n>f+++++++++ objects/namespaced/openstack/secrets/tempest-etc.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-admin.txt\n>f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-admin.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-user.txt\n>f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-user.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/valkey-ca.txt\n>f+++++++++ objects/namespaced/openstack/secrets/valkey-ca.yaml\n>f+++++++++ objects/namespaced/openstack/secrets/valkey-server-certs.txt\n>f+++++++++ objects/namespaced/openstack/secrets/valkey-server-certs.yaml\ncd+++++++++ objects/namespaced/openstack/serviceaccounts/\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-api.yaml\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/barbican-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-create-internal-tenant.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-create-internal-tenant.yaml\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/cinder-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume-usage-audit.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume-usage-audit.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-api.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/glance-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-metadefs-load.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-metadefs-load.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-storage-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-storage-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-cfn.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-cfn.yaml\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/heat-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine-cleaner.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user-domain.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user-domain.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-purge-deleted.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-purge-deleted.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-trusts.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-trusts.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-db-init.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/horizon-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keepalived.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keepalived.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-rotate.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-setup.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-setup.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-domain-manage.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-domain-manage.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-fernet-rotate.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/keystone-fernet-rotate.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-fernet-setup.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-fernet-setup.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/libvirt.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/libvirt.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-conductor.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user-domain.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user-domain.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/magnum-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-data.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-data.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-share.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-share.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/memcached-memcached.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/memcached-memcached.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-netns-cleanup-cron.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-netns-cleanup-cron.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ovn-metadata-agent.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ovn-metadata-agent.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-api-metadata.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-api-metadata.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-api-osapi.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/nova-api-osapi.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup-cron.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup-cron.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-compute.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-compute.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-conductor.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-novncproxy.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-novncproxy.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-rabbit-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-rabbit-init.yaml\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/nova-scheduler.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-scheduler.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-service-cleaner.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-service-cleaner.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-test.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-test.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-bootstrap.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-bootstrap.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-health-manager.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-health-manager.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-housekeeping.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-housekeeping.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-rabbit-init.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/octavia-rabbit-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-worker.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-worker.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/openvswitch-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/openvswitch-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-controller.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-controller.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-northd.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-northd.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-nb.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-nb.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-sb.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-sb.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-endpoints.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-endpoints.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-service.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-service.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-user.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-user.yaml\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/pxc-operator.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/pxc-operator.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-barbican-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-barbican-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cinder-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cinder-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cluster-operator.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cluster-operator.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-glance-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-glance-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-heat-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-heat-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-keystone-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-keystone-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-magnum-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-magnum-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-manila-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-manila-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-messaging-topology-operator.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-messaging-topology-operator.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-nova-server.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/rabbitmq-nova-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-octavia-server.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-octavia-server.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-cmd-reporter.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-cmd-reporter.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-default.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-default.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-mgr.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-mgr.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-osd.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-osd.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-purge-osd.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-purge-osd.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-rgw.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-rgw.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-api.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-api.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-conductor.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-init.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-init.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-sync.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-sync.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/tempest-ks-user.txt\n>f+++++++++ 
objects/namespaced/openstack/serviceaccounts/tempest-ks-user.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/tempest-run-tests.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/tempest-run-tests.yaml\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/valkey.txt\n>f+++++++++ objects/namespaced/openstack/serviceaccounts/valkey.yaml\ncd+++++++++ objects/namespaced/openstack/services/\n>f+++++++++ objects/namespaced/openstack/services/barbican-api.txt\n>f+++++++++ objects/namespaced/openstack/services/barbican-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/ceph-mon.txt\n>f+++++++++ objects/namespaced/openstack/services/ceph-mon.yaml\n>f+++++++++ objects/namespaced/openstack/services/cinder-api.txt\n>f+++++++++ objects/namespaced/openstack/services/cinder-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/glance-api.txt\n>f+++++++++ objects/namespaced/openstack/services/glance-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/heat-api.txt\n>f+++++++++ objects/namespaced/openstack/services/heat-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/heat-cfn.txt\n>f+++++++++ objects/namespaced/openstack/services/heat-cfn.yaml\n>f+++++++++ objects/namespaced/openstack/services/horizon-int.txt\n>f+++++++++ objects/namespaced/openstack/services/horizon-int.yaml\n>f+++++++++ objects/namespaced/openstack/services/horizon.txt\n>f+++++++++ objects/namespaced/openstack/services/horizon.yaml\n>f+++++++++ objects/namespaced/openstack/services/keystone-api.txt\n>f+++++++++ objects/namespaced/openstack/services/keystone-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/magnum-api.txt\n>f+++++++++ objects/namespaced/openstack/services/magnum-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/magnum-registry.txt\n>f+++++++++ objects/namespaced/openstack/services/magnum-registry.yaml\n>f+++++++++ objects/namespaced/openstack/services/manila-api.txt\n>f+++++++++ 
objects/namespaced/openstack/services/manila-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/memcached-metrics.txt\n>f+++++++++ objects/namespaced/openstack/services/memcached-metrics.yaml\n>f+++++++++ objects/namespaced/openstack/services/memcached.txt\n>f+++++++++ objects/namespaced/openstack/services/memcached.yaml\n>f+++++++++ objects/namespaced/openstack/services/neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/services/neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/services/nova-api.txt\n>f+++++++++ objects/namespaced/openstack/services/nova-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/nova-metadata.txt\n>f+++++++++ objects/namespaced/openstack/services/nova-metadata.yaml\n>f+++++++++ objects/namespaced/openstack/services/nova-novncproxy.txt\n>f+++++++++ objects/namespaced/openstack/services/nova-novncproxy.yaml\n>f+++++++++ objects/namespaced/openstack/services/octavia-api.txt\n>f+++++++++ objects/namespaced/openstack/services/octavia-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/openstack-exporter.txt\n>f+++++++++ objects/namespaced/openstack/services/openstack-exporter.yaml\n>f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-nb.txt\n>f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-nb.yaml\n>f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-sb.txt\n>f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-sb.yaml\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-cluster-operator.txt\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-cluster-operator.yaml\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-metrics.txt\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-metrics.yaml\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-replicas.txt\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-replicas.yaml\n>f+++++++++ 
objects/namespaced/openstack/services/percona-xtradb-haproxy.txt\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy.yaml\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc-unready.txt\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc-unready.yaml\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc.txt\n>f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc.yaml\n>f+++++++++ objects/namespaced/openstack/services/placement-api.txt\n>f+++++++++ objects/namespaced/openstack/services/placement-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-keystone-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-keystone-nodes.yaml\n>f+++++++++ 
objects/namespaced/openstack/services/rabbitmq-keystone.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-keystone.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-messaging-topology-operator-webhook.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-messaging-topology-operator-webhook.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia-nodes.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia-nodes.yaml\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia.txt\n>f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia.yaml\n>f+++++++++ objects/namespaced/openstack/services/rook-ceph-rgw-ceph.txt\n>f+++++++++ objects/namespaced/openstack/services/rook-ceph-rgw-ceph.yaml\n>f+++++++++ 
objects/namespaced/openstack/services/staffeln-api.txt\n>f+++++++++ objects/namespaced/openstack/services/staffeln-api.yaml\n>f+++++++++ objects/namespaced/openstack/services/valkey-headless.txt\n>f+++++++++ objects/namespaced/openstack/services/valkey-headless.yaml\n>f+++++++++ objects/namespaced/openstack/services/valkey-metrics.txt\n>f+++++++++ objects/namespaced/openstack/services/valkey-metrics.yaml\n>f+++++++++ objects/namespaced/openstack/services/valkey.txt\n>f+++++++++ objects/namespaced/openstack/services/valkey.yaml\ncd+++++++++ objects/namespaced/openstack/statefulsets/\n>f+++++++++ objects/namespaced/openstack/statefulsets/magnum-conductor.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/magnum-conductor.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-nb.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-nb.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-sb.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-sb.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-haproxy.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-haproxy.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-pxc.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-pxc.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-barbican-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-barbican-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-cinder-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-cinder-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-glance-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-glance-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-heat-server.txt\n>f+++++++++ 
objects/namespaced/openstack/statefulsets/rabbitmq-heat-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-keystone-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-keystone-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-magnum-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-magnum-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-manila-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-manila-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-neutron-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-neutron-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-nova-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-nova-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-octavia-server.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-octavia-server.yaml\n>f+++++++++ objects/namespaced/openstack/statefulsets/valkey-node.txt\n>f+++++++++ objects/namespaced/openstack/statefulsets/valkey-node.yaml\ncd+++++++++ objects/namespaced/orc-system/\ncd+++++++++ objects/namespaced/orc-system/configmaps/\n>f+++++++++ objects/namespaced/orc-system/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/orc-system/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/orc-system/deployment/\n>f+++++++++ objects/namespaced/orc-system/deployment/orc-controller-manager.txt\n>f+++++++++ objects/namespaced/orc-system/deployment/orc-controller-manager.yaml\ncd+++++++++ objects/namespaced/orc-system/endpoints/\n>f+++++++++ objects/namespaced/orc-system/endpoints/orc-controller-manager-metrics-service.txt\n>f+++++++++ objects/namespaced/orc-system/endpoints/orc-controller-manager-metrics-service.yaml\ncd+++++++++ objects/namespaced/orc-system/pods/\n>f+++++++++ 
objects/namespaced/orc-system/pods/orc-controller-manager-6cb597b5d4-glhcz.txt\n>f+++++++++ objects/namespaced/orc-system/pods/orc-controller-manager-6cb597b5d4-glhcz.yaml\ncd+++++++++ objects/namespaced/orc-system/rolebindings/\n>f+++++++++ objects/namespaced/orc-system/rolebindings/orc-leader-election-rolebinding.txt\n>f+++++++++ objects/namespaced/orc-system/rolebindings/orc-leader-election-rolebinding.yaml\ncd+++++++++ objects/namespaced/orc-system/roles/\n>f+++++++++ objects/namespaced/orc-system/roles/orc-leader-election-role.txt\n>f+++++++++ objects/namespaced/orc-system/roles/orc-leader-election-role.yaml\ncd+++++++++ objects/namespaced/orc-system/serviceaccounts/\n>f+++++++++ objects/namespaced/orc-system/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/orc-system/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/orc-system/serviceaccounts/orc-controller-manager.txt\n>f+++++++++ objects/namespaced/orc-system/serviceaccounts/orc-controller-manager.yaml\ncd+++++++++ objects/namespaced/orc-system/services/\n>f+++++++++ objects/namespaced/orc-system/services/orc-controller-manager-metrics-service.txt\n>f+++++++++ objects/namespaced/orc-system/services/orc-controller-manager-metrics-service.yaml\ncd+++++++++ objects/namespaced/rook-ceph/\ncd+++++++++ objects/namespaced/rook-ceph/configmaps/\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/kube-root-ca.crt.txt\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/kube-root-ca.crt.yaml\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-config.txt\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-config.yaml\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-mapping-config.txt\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-mapping-config.yaml\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-operator-config.txt\n>f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-operator-config.yaml\ncd+++++++++ 
objects/namespaced/rook-ceph/deployment/\n>f+++++++++ objects/namespaced/rook-ceph/deployment/rook-ceph-operator.txt\n>f+++++++++ objects/namespaced/rook-ceph/deployment/rook-ceph-operator.yaml\ncd+++++++++ objects/namespaced/rook-ceph/pods/\n>f+++++++++ objects/namespaced/rook-ceph/pods/rook-ceph-operator-7b66cfb94c-tj94j.txt\n>f+++++++++ objects/namespaced/rook-ceph/pods/rook-ceph-operator-7b66cfb94c-tj94j.yaml\ncd+++++++++ objects/namespaced/rook-ceph/rolebindings/\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/cephfs-csi-provisioner-role-cfg.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/cephfs-csi-provisioner-role-cfg.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rbd-csi-provisioner-role-cfg.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rbd-csi-provisioner-role-cfg.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cluster-mgmt.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cluster-mgmt.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cmd-reporter.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cmd-reporter.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system-openstack.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system-openstack.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-osd.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-osd.yaml\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-purge-osd.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-purge-osd.yaml\n>f+++++++++ 
objects/namespaced/rook-ceph/rolebindings/rook-ceph-system.txt\n>f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-system.yaml\ncd+++++++++ objects/namespaced/rook-ceph/roles/\n>f+++++++++ objects/namespaced/rook-ceph/roles/cephfs-external-provisioner-cfg.txt\n>f+++++++++ objects/namespaced/rook-ceph/roles/cephfs-external-provisioner-cfg.yaml\n>f+++++++++ objects/namespaced/rook-ceph/roles/rbd-external-provisioner-cfg.txt\n>f+++++++++ objects/namespaced/rook-ceph/roles/rbd-external-provisioner-cfg.yaml\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-cmd-reporter.txt\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-cmd-reporter.yaml\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-mgr.txt\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-mgr.yaml\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-osd.txt\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-osd.yaml\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-purge-osd.txt\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-purge-osd.yaml\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-system.txt\n>f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-system.yaml\ncd+++++++++ objects/namespaced/rook-ceph/secrets/\n>f+++++++++ objects/namespaced/rook-ceph/secrets/sh.helm.release.v1.rook-ceph.v1.txt\n>f+++++++++ objects/namespaced/rook-ceph/secrets/sh.helm.release.v1.rook-ceph.v1.yaml\ncd+++++++++ objects/namespaced/rook-ceph/serviceaccounts/\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/objectstorage-provisioner.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/objectstorage-provisioner.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-cmd-reporter.txt\n>f+++++++++ 
objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-cmd-reporter.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-default.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-default.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-mgr.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-mgr.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-osd.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-osd.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-purge-osd.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-purge-osd.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-rgw.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-rgw.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-system.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-system.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-plugin-sa.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-plugin-sa.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-provisioner-sa.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-provisioner-sa.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-plugin-sa.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-plugin-sa.yaml\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-provisioner-sa.txt\n>f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-provisioner-sa.yaml\ncd+++++++++ objects/namespaced/secretgen-controller/\ncd+++++++++ objects/namespaced/secretgen-controller/configmaps/\n>f+++++++++ objects/namespaced/secretgen-controller/configmaps/kube-root-ca.crt.txt\n>f+++++++++ 
objects/namespaced/secretgen-controller/configmaps/kube-root-ca.crt.yaml\ncd+++++++++ objects/namespaced/secretgen-controller/deployment/\n>f+++++++++ objects/namespaced/secretgen-controller/deployment/secretgen-controller.txt\n>f+++++++++ objects/namespaced/secretgen-controller/deployment/secretgen-controller.yaml\ncd+++++++++ objects/namespaced/secretgen-controller/pods/\n>f+++++++++ objects/namespaced/secretgen-controller/pods/secretgen-controller-5cf976ccc7-szs5h.txt\n>f+++++++++ objects/namespaced/secretgen-controller/pods/secretgen-controller-5cf976ccc7-szs5h.yaml\ncd+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/\n>f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/default.txt\n>f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/default.yaml\n>f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/secretgen-controller-sa.txt\n>f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/secretgen-controller-sa.yaml\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ objects/",
                                "cd+++++++++ objects/cluster/",
                                "cd+++++++++ objects/cluster/clusterrole/",
                                ">f+++++++++ objects/cluster/clusterrole/admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/capi-aggregated-manager-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/capi-aggregated-manager-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/capi-kubeadm-bootstrap-manager-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/capi-kubeadm-bootstrap-manager-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-aggregated-manager-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-aggregated-manager-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-manager-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/capi-kubeadm-control-plane-manager-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/capi-manager-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/capi-manager-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/capo-manager-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/capo-manager-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cephfs-csi-nodeplugin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cephfs-csi-nodeplugin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cephfs-external-provisioner-runner.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cephfs-external-provisioner-runner.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-cainjector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-cainjector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-approve:cert-manager-io.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-approve:cert-manager-io.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-certificates.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-certificates.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-certificatesigningrequests.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-certificatesigningrequests.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-challenges.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-challenges.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-clusterissuers.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-clusterissuers.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-ingress-shim.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-ingress-shim.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-issuers.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-issuers.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-orders.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-controller-orders.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-edit.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-edit.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-view.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-view.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-webhook:subjectaccessreviews.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cert-manager-webhook:subjectaccessreviews.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cilium-operator.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cilium-operator.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cilium.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cilium.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/cluster-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/cluster-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/edit.txt",
                                ">f+++++++++ objects/cluster/clusterrole/edit.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/goldpinger-clusterrole.txt",
                                ">f+++++++++ objects/cluster/clusterrole/goldpinger-clusterrole.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/ingress-nginx.txt",
                                ">f+++++++++ objects/cluster/clusterrole/ingress-nginx.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-grafana-clusterrole.txt",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-grafana-clusterrole.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-kube-state-metrics.txt",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-kube-state-metrics.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-operator.txt",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-operator.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/cluster/clusterrole/kube-prometheus-stack-prometheus.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.txt",
                                ">f+++++++++ objects/cluster/clusterrole/kubeadm:get-nodes.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/local-path-provisioner.txt",
                                ">f+++++++++ objects/cluster/clusterrole/local-path-provisioner.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/node-feature-discovery-gc.txt",
                                ">f+++++++++ objects/cluster/clusterrole/node-feature-discovery-gc.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/node-feature-discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrole/node-feature-discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/nova-bootstrap.txt",
                                ">f+++++++++ objects/cluster/clusterrole/nova-bootstrap.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/nova-cell-setup.txt",
                                ">f+++++++++ objects/cluster/clusterrole/nova-cell-setup.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/objectstorage-provisioner-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/objectstorage-provisioner-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/orc-image-editor-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/orc-image-editor-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/orc-image-viewer-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/orc-image-viewer-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/orc-manager-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/orc-manager-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/orc-metrics-auth-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/orc-metrics-auth-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/orc-metrics-reader.txt",
                                ">f+++++++++ objects/cluster/clusterrole/orc-metrics-reader.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/ovn-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/ovn-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-edit.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-edit.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-view.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack-view.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-cluster-operator-openstack.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-edit.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-edit.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-view.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack-view.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rabbitmq-messaging-topology-operator-openstack.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rbd-csi-nodeplugin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rbd-csi-nodeplugin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rbd-external-provisioner-runner.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rbd-external-provisioner-runner.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-cluster-mgmt.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-cluster-mgmt.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-global.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-global.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-cluster.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-cluster.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-system.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-mgr-system.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-object-bucket.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-object-bucket.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-osd.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-system.txt",
                                ">f+++++++++ objects/cluster/clusterrole/rook-ceph-system.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/secretgen-controller-cluster-role.txt",
                                ">f+++++++++ objects/cluster/clusterrole/secretgen-controller-cluster-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-edit.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:aggregate-to-view.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:auth-delegator.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:auth-delegator.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:basic-user.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:basic-user.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:nodeclient.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kube-apiserver-client-kubelet-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:kubelet-serving-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:certificates.k8s.io:legacy-unknown-approver.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:attachdetach-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:certificate-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:clusterrole-aggregation-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:cronjob-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:daemon-set-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:daemon-set-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:deployment-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:disruption-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpoint-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslice-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:endpointslicemirroring-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ephemeral-volume-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:expand-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:generic-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:horizontal-pod-autoscaler.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:job-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:namespace-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:node-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:node-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:persistent-volume-binder.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pod-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pv-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:pvc-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replicaset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:replication-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:resourcequota-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:root-ca-cert-publisher.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:route-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:route-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-account-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:service-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:statefulset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-after-finished-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:controller:ttl-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:coredns.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:coredns.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:heapster.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:heapster.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-aggregator.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-controller-manager.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-dns.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-dns.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kube-scheduler.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:kubelet-api-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:kubelet-api-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:monitoring.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:monitoring.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-bootstrapper.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-problem-detector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-problem-detector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-proxier.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node-proxier.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:node.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:node.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:persistent-volume-provisioner.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:public-info-viewer.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:service-account-issuer-discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrole/system:volume-scheduler.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/vector.txt",
                                ">f+++++++++ objects/cluster/clusterrole/vector.yaml",
                                ">f+++++++++ objects/cluster/clusterrole/view.txt",
                                ">f+++++++++ objects/cluster/clusterrole/view.yaml",
                                "cd+++++++++ objects/cluster/clusterrolebinding/",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capi-kubeadm-bootstrap-manager-rolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capi-kubeadm-bootstrap-manager-rolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capi-kubeadm-control-plane-manager-rolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capi-kubeadm-control-plane-manager-rolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capi-manager-rolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capi-manager-rolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capo-manager-rolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/capo-manager-rolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-nodeplugin-role.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-nodeplugin-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-provisioner-role.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cephfs-csi-provisioner-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-cainjector.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-cainjector.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-approve:cert-manager-io.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-approve:cert-manager-io.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificates.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificates.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificatesigningrequests.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-certificatesigningrequests.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-challenges.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-challenges.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-clusterissuers.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-clusterissuers.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-ingress-shim.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-ingress-shim.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-issuers.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-issuers.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-orders.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-controller-orders.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-webhook:subjectaccessreviews.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cert-manager-webhook:subjectaccessreviews.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium-operator.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cilium.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/cluster-admin.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/goldpinger-clusterrolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/goldpinger-clusterrolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ingress-nginx.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ingress-nginx.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-grafana-clusterrolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-grafana-clusterrolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-kube-state-metrics.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-kube-state-metrics.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-operator.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-operator.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kube-prometheus-stack-prometheus.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:get-nodes.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:kubelet-bootstrap.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-bootstrap.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-autoapprove-certificate-rotation.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/kubeadm:node-proxier.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/local-path-provisioner.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/local-path-provisioner.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/magnum-cluster-api.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/magnum-cluster-api.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery-gc.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery-gc.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/node-feature-discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/nova-bootstrap.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/nova-bootstrap.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/nova-cell-setup.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/nova-cell-setup.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/objectstorage-provisioner-role-binding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/objectstorage-provisioner-role-binding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/orc-manager-rolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/orc-manager-rolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/orc-metrics-auth-rolebinding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/orc-metrics-auth-rolebinding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ovn-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/ovn-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-cluster-operator-openstack.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-cluster-operator-openstack.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-messaging-topology-operator-openstack.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rabbitmq-messaging-topology-operator-openstack.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-nodeplugin.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-nodeplugin.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-provisioner-role.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rbd-csi-provisioner-role.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-global.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-global.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster-openstack.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster-openstack.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-mgr-cluster.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-object-bucket.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-object-bucket.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-osd-openstack.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-osd-openstack.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-osd.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-system.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/rook-ceph-system.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/secretgen-controller-cluster-role-binding.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/secretgen-controller-cluster-role-binding.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:basic-user.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:attachdetach-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:certificate-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:clusterrole-aggregation-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:cronjob-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:daemon-set-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:deployment-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:disruption-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:disruption-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpoint-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpoint-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslice-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:endpointslicemirroring-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ephemeral-volume-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:expand-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:generic-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:horizontal-pod-autoscaler.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:job-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:namespace-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:node-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:node-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:persistent-volume-binder.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pod-garbage-collector.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pv-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:pvc-protection-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replicaset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:replication-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:resourcequota-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:root-ca-cert-publisher.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:route-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-account-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:service-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:statefulset-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:statefulset-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-after-finished-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:controller:ttl-controller.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:coredns.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:coredns.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-controller-manager.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-dns.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:kube-scheduler.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:monitoring.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node-proxier.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:node.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:public-info-viewer.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:public-info-viewer.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:service-account-issuer-discovery.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:volume-scheduler.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/system:volume-scheduler.yaml",
                                ">f+++++++++ objects/cluster/clusterrolebinding/vector.txt",
                                ">f+++++++++ objects/cluster/clusterrolebinding/vector.yaml",
                                "cd+++++++++ objects/cluster/namespace/",
                                ">f+++++++++ objects/cluster/namespace/auth-system.txt",
                                ">f+++++++++ objects/cluster/namespace/auth-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/capi-kubeadm-bootstrap-system.txt",
                                ">f+++++++++ objects/cluster/namespace/capi-kubeadm-bootstrap-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/capi-kubeadm-control-plane-system.txt",
                                ">f+++++++++ objects/cluster/namespace/capi-kubeadm-control-plane-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/capi-system.txt",
                                ">f+++++++++ objects/cluster/namespace/capi-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/capo-system.txt",
                                ">f+++++++++ objects/cluster/namespace/capo-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/cert-manager.txt",
                                ">f+++++++++ objects/cluster/namespace/cert-manager.yaml",
                                ">f+++++++++ objects/cluster/namespace/default.txt",
                                ">f+++++++++ objects/cluster/namespace/default.yaml",
                                ">f+++++++++ objects/cluster/namespace/ingress-nginx.txt",
                                ">f+++++++++ objects/cluster/namespace/ingress-nginx.yaml",
                                ">f+++++++++ objects/cluster/namespace/kube-node-lease.txt",
                                ">f+++++++++ objects/cluster/namespace/kube-node-lease.yaml",
                                ">f+++++++++ objects/cluster/namespace/kube-public.txt",
                                ">f+++++++++ objects/cluster/namespace/kube-public.yaml",
                                ">f+++++++++ objects/cluster/namespace/kube-system.txt",
                                ">f+++++++++ objects/cluster/namespace/kube-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/local-path-storage.txt",
                                ">f+++++++++ objects/cluster/namespace/local-path-storage.yaml",
                                ">f+++++++++ objects/cluster/namespace/magnum-system.txt",
                                ">f+++++++++ objects/cluster/namespace/magnum-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/monitoring.txt",
                                ">f+++++++++ objects/cluster/namespace/monitoring.yaml",
                                ">f+++++++++ objects/cluster/namespace/openstack.txt",
                                ">f+++++++++ objects/cluster/namespace/openstack.yaml",
                                ">f+++++++++ objects/cluster/namespace/orc-system.txt",
                                ">f+++++++++ objects/cluster/namespace/orc-system.yaml",
                                ">f+++++++++ objects/cluster/namespace/rook-ceph.txt",
                                ">f+++++++++ objects/cluster/namespace/rook-ceph.yaml",
                                ">f+++++++++ objects/cluster/namespace/secretgen-controller.txt",
                                ">f+++++++++ objects/cluster/namespace/secretgen-controller.yaml",
                                "cd+++++++++ objects/cluster/node/",
                                ">f+++++++++ objects/cluster/node/instance.txt",
                                ">f+++++++++ objects/cluster/node/instance.yaml",
                                "cd+++++++++ objects/cluster/storageclass/",
                                ">f+++++++++ objects/cluster/storageclass/general.txt",
                                ">f+++++++++ objects/cluster/storageclass/general.yaml",
                                "cd+++++++++ objects/namespaced/",
                                "cd+++++++++ objects/namespaced/auth-system/",
                                "cd+++++++++ objects/namespaced/auth-system/configmaps/",
                                ">f+++++++++ objects/namespaced/auth-system/configmaps/keycloak-env-vars.txt",
                                ">f+++++++++ objects/namespaced/auth-system/configmaps/keycloak-env-vars.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/auth-system/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/endpoints/",
                                ">f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-headless.txt",
                                ">f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-headless.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-metrics.txt",
                                ">f+++++++++ objects/namespaced/auth-system/endpoints/keycloak-metrics.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/endpoints/keycloak.txt",
                                ">f+++++++++ objects/namespaced/auth-system/endpoints/keycloak.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/ingresses/",
                                ">f+++++++++ objects/namespaced/auth-system/ingresses/keycloak.txt",
                                ">f+++++++++ objects/namespaced/auth-system/ingresses/keycloak.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/networkpolicies/",
                                ">f+++++++++ objects/namespaced/auth-system/networkpolicies/keycloak.txt",
                                ">f+++++++++ objects/namespaced/auth-system/networkpolicies/keycloak.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/pods/",
                                ">f+++++++++ objects/namespaced/auth-system/pods/keycloak-0.txt",
                                ">f+++++++++ objects/namespaced/auth-system/pods/keycloak-0.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/secrets/",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/keycloak-externaldb.txt",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/keycloak-externaldb.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/keycloak.199-204-45-156.nip.io-tls.txt",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/keycloak.199-204-45-156.nip.io-tls.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/keycloak.txt",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/keycloak.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/sh.helm.release.v1.keycloak.v1.txt",
                                ">f+++++++++ objects/namespaced/auth-system/secrets/sh.helm.release.v1.keycloak.v1.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/auth-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/auth-system/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/serviceaccounts/keycloak.txt",
                                ">f+++++++++ objects/namespaced/auth-system/serviceaccounts/keycloak.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/services/",
                                ">f+++++++++ objects/namespaced/auth-system/services/keycloak-headless.txt",
                                ">f+++++++++ objects/namespaced/auth-system/services/keycloak-headless.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/services/keycloak-metrics.txt",
                                ">f+++++++++ objects/namespaced/auth-system/services/keycloak-metrics.yaml",
                                ">f+++++++++ objects/namespaced/auth-system/services/keycloak.txt",
                                ">f+++++++++ objects/namespaced/auth-system/services/keycloak.yaml",
                                "cd+++++++++ objects/namespaced/auth-system/statefulsets/",
                                ">f+++++++++ objects/namespaced/auth-system/statefulsets/keycloak.txt",
                                ">f+++++++++ objects/namespaced/auth-system/statefulsets/keycloak.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/configmaps/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/deployment/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/deployment/capi-kubeadm-bootstrap-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/deployment/capi-kubeadm-bootstrap-controller-manager.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/endpoints/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/endpoints/capi-kubeadm-bootstrap-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/endpoints/capi-kubeadm-bootstrap-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/pods/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/pods/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/pods/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/rolebindings/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/rolebindings/capi-kubeadm-bootstrap-leader-election-rolebinding.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/rolebindings/capi-kubeadm-bootstrap-leader-election-rolebinding.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/roles/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/roles/capi-kubeadm-bootstrap-leader-election-role.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/roles/capi-kubeadm-bootstrap-leader-election-role.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/secrets/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/secrets/capi-kubeadm-bootstrap-webhook-service-cert.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/secrets/capi-kubeadm-bootstrap-webhook-service-cert.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/capi-kubeadm-bootstrap-manager.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/capi-kubeadm-bootstrap-manager.yaml",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/services/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/services/capi-kubeadm-bootstrap-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-bootstrap-system/services/capi-kubeadm-bootstrap-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/configmaps/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/deployment/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/deployment/capi-kubeadm-control-plane-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/deployment/capi-kubeadm-control-plane-controller-manager.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/endpoints/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/endpoints/capi-kubeadm-control-plane-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/endpoints/capi-kubeadm-control-plane-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/pods/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/pods/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/pods/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/rolebindings/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/rolebindings/capi-kubeadm-control-plane-leader-election-rolebinding.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/rolebindings/capi-kubeadm-control-plane-leader-election-rolebinding.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/roles/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/roles/capi-kubeadm-control-plane-leader-election-role.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/roles/capi-kubeadm-control-plane-leader-election-role.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/secrets/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/secrets/capi-kubeadm-control-plane-webhook-service-cert.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/secrets/capi-kubeadm-control-plane-webhook-service-cert.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/capi-kubeadm-control-plane-manager.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/capi-kubeadm-control-plane-manager.yaml",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/services/",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/services/capi-kubeadm-control-plane-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capi-kubeadm-control-plane-system/services/capi-kubeadm-control-plane-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/",
                                "cd+++++++++ objects/namespaced/capi-system/configmaps/",
                                ">f+++++++++ objects/namespaced/capi-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/capi-system/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/deployment/",
                                ">f+++++++++ objects/namespaced/capi-system/deployment/capi-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/capi-system/deployment/capi-controller-manager.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/endpoints/",
                                ">f+++++++++ objects/namespaced/capi-system/endpoints/capi-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capi-system/endpoints/capi-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/pods/",
                                ">f+++++++++ objects/namespaced/capi-system/pods/capi-controller-manager-bc4cf8c95-w8p6b.txt",
                                ">f+++++++++ objects/namespaced/capi-system/pods/capi-controller-manager-bc4cf8c95-w8p6b.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/rolebindings/",
                                ">f+++++++++ objects/namespaced/capi-system/rolebindings/capi-leader-election-rolebinding.txt",
                                ">f+++++++++ objects/namespaced/capi-system/rolebindings/capi-leader-election-rolebinding.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/roles/",
                                ">f+++++++++ objects/namespaced/capi-system/roles/capi-leader-election-role.txt",
                                ">f+++++++++ objects/namespaced/capi-system/roles/capi-leader-election-role.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/secrets/",
                                ">f+++++++++ objects/namespaced/capi-system/secrets/capi-webhook-service-cert.txt",
                                ">f+++++++++ objects/namespaced/capi-system/secrets/capi-webhook-service-cert.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/capi-system/serviceaccounts/capi-manager.txt",
                                ">f+++++++++ objects/namespaced/capi-system/serviceaccounts/capi-manager.yaml",
                                ">f+++++++++ objects/namespaced/capi-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/capi-system/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/capi-system/services/",
                                ">f+++++++++ objects/namespaced/capi-system/services/capi-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capi-system/services/capi-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/",
                                "cd+++++++++ objects/namespaced/capo-system/configmaps/",
                                ">f+++++++++ objects/namespaced/capo-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/capo-system/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/deployment/",
                                ">f+++++++++ objects/namespaced/capo-system/deployment/capo-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/capo-system/deployment/capo-controller-manager.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/endpoints/",
                                ">f+++++++++ objects/namespaced/capo-system/endpoints/capo-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capo-system/endpoints/capo-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/pods/",
                                ">f+++++++++ objects/namespaced/capo-system/pods/capo-controller-manager-6975759b4b-tkxrs.txt",
                                ">f+++++++++ objects/namespaced/capo-system/pods/capo-controller-manager-6975759b4b-tkxrs.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/rolebindings/",
                                ">f+++++++++ objects/namespaced/capo-system/rolebindings/capo-leader-election-rolebinding.txt",
                                ">f+++++++++ objects/namespaced/capo-system/rolebindings/capo-leader-election-rolebinding.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/roles/",
                                ">f+++++++++ objects/namespaced/capo-system/roles/capo-leader-election-role.txt",
                                ">f+++++++++ objects/namespaced/capo-system/roles/capo-leader-election-role.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/secrets/",
                                ">f+++++++++ objects/namespaced/capo-system/secrets/capo-webhook-service-cert.txt",
                                ">f+++++++++ objects/namespaced/capo-system/secrets/capo-webhook-service-cert.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/capo-system/serviceaccounts/capo-manager.txt",
                                ">f+++++++++ objects/namespaced/capo-system/serviceaccounts/capo-manager.yaml",
                                ">f+++++++++ objects/namespaced/capo-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/capo-system/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/capo-system/services/",
                                ">f+++++++++ objects/namespaced/capo-system/services/capo-webhook-service.txt",
                                ">f+++++++++ objects/namespaced/capo-system/services/capo-webhook-service.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/",
                                "cd+++++++++ objects/namespaced/cert-manager/configmaps/",
                                ">f+++++++++ objects/namespaced/cert-manager/configmaps/cert-manager-webhook.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/configmaps/cert-manager-webhook.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/deployment/",
                                ">f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-cainjector.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-cainjector.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-webhook.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager-webhook.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/deployment/cert-manager.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/endpoints/",
                                ">f+++++++++ objects/namespaced/cert-manager/endpoints/cert-manager-webhook.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/endpoints/cert-manager-webhook.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/endpoints/cert-manager.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/endpoints/cert-manager.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/pods/",
                                ">f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-75c4c745bc-45s4r.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-75c4c745bc-45s4r.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-cainjector-64b59ddb75-tl5x7.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-cainjector-64b59ddb75-tl5x7.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-webhook-548949fc64-vkrlt.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/pods/cert-manager-webhook-548949fc64-vkrlt.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/rolebindings/",
                                ">f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-cainjector:leaderelection.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-cainjector:leaderelection.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-webhook:dynamic-serving.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager-webhook:dynamic-serving.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager:leaderelection.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/rolebindings/cert-manager:leaderelection.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/roles/",
                                ">f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-cainjector:leaderelection.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-cainjector:leaderelection.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-webhook:dynamic-serving.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/roles/cert-manager-webhook:dynamic-serving.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/roles/cert-manager:leaderelection.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/roles/cert-manager:leaderelection.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/secrets/",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/cert-manager-selfsigned-ca.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/cert-manager-selfsigned-ca.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/cert-manager-webhook-ca.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/cert-manager-webhook-ca.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/kube-prometheus-stack-ca.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/kube-prometheus-stack-ca.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/sh.helm.release.v1.cert-manager.v1.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/secrets/sh.helm.release.v1.cert-manager.v1.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-cainjector.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-cainjector.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-webhook.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager-webhook.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/cert-manager.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/cert-manager/services/",
                                ">f+++++++++ objects/namespaced/cert-manager/services/cert-manager-webhook.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/services/cert-manager-webhook.yaml",
                                ">f+++++++++ objects/namespaced/cert-manager/services/cert-manager.txt",
                                ">f+++++++++ objects/namespaced/cert-manager/services/cert-manager.yaml",
                                "cd+++++++++ objects/namespaced/default/",
                                "cd+++++++++ objects/namespaced/default/configmaps/",
                                ">f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/default/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/default/endpoints/",
                                ">f+++++++++ objects/namespaced/default/endpoints/kubernetes.txt",
                                ">f+++++++++ objects/namespaced/default/endpoints/kubernetes.yaml",
                                "cd+++++++++ objects/namespaced/default/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/default/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/default/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/default/services/",
                                ">f+++++++++ objects/namespaced/default/services/kubernetes.txt",
                                ">f+++++++++ objects/namespaced/default/services/kubernetes.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/",
                                "cd+++++++++ objects/namespaced/ingress-nginx/configmaps/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-controller.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-controller.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-tcp.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-tcp.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-udp.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/ingress-nginx-udp.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/daemonsets/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/daemonsets/ingress-nginx-controller.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/daemonsets/ingress-nginx-controller.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/deployment/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/deployment/ingress-nginx-defaultbackend.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/deployment/ingress-nginx-defaultbackend.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/endpoints/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-admission.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-admission.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-metrics.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller-metrics.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-controller.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-defaultbackend.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/endpoints/ingress-nginx-defaultbackend.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/pods/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-controller-j4bqv.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-controller-j4bqv.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-defaultbackend-6987ff55cf-gpx4l.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/pods/ingress-nginx-defaultbackend-6987ff55cf-gpx4l.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/rolebindings/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/rolebindings/ingress-nginx.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/rolebindings/ingress-nginx.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/roles/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/roles/ingress-nginx.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/roles/ingress-nginx.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/secrets/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/secrets/ingress-nginx-admission.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/secrets/ingress-nginx-admission.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/secrets/sh.helm.release.v1.ingress-nginx.v1.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/secrets/sh.helm.release.v1.ingress-nginx.v1.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx-backend.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx-backend.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/serviceaccounts/ingress-nginx.yaml",
                                "cd+++++++++ objects/namespaced/ingress-nginx/services/",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-admission.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-admission.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-metrics.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller-metrics.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-controller.yaml",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-defaultbackend.txt",
                                ">f+++++++++ objects/namespaced/ingress-nginx/services/ingress-nginx-defaultbackend.yaml",
                                "cd+++++++++ objects/namespaced/kube-node-lease/",
                                "cd+++++++++ objects/namespaced/kube-node-lease/configmaps/",
                                ">f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/kube-node-lease/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/kube-node-lease/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/",
                                "cd+++++++++ objects/namespaced/kube-public/configmaps/",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.txt",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/cluster-info.yaml",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/kube-public/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/rolebindings/",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.txt",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/kubeadm:bootstrap-signer-clusterinfo.yaml",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-public/rolebindings/system:controller:bootstrap-signer.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/roles/",
                                ">f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.txt",
                                ">f+++++++++ objects/namespaced/kube-public/roles/kubeadm:bootstrap-signer-clusterinfo.yaml",
                                ">f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-public/roles/system:controller:bootstrap-signer.yaml",
                                "cd+++++++++ objects/namespaced/kube-public/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/kube-public/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/",
                                "cd+++++++++ objects/namespaced/kube-system/configmaps/",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/cilium-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/coredns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/extension-apiserver-authentication.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-apiserver-legacy-service-account-token-tracking.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kube-root-ca.crt.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubeadm-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubelet-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/configmaps/kubelet-config.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/daemonsets/",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.txt",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/cilium.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/daemonsets/kube-proxy.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/deployment/",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.txt",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/cilium-operator.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/deployment/coredns.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/endpoints/",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-dns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-coredns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-controller-manager.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-etcd.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-etcd.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-scheduler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kube-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kubelet.txt",
                                ">f+++++++++ objects/namespaced/kube-system/endpoints/kube-prometheus-stack-kubelet.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/pods/",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-869df985b8-kszk2.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-operator-869df985b8-kszk2.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-vdz4f.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/cilium-vdz4f.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-6f2mm.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-6f2mm.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-j6fp4.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/coredns-67659f764b-j6fp4.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/etcd-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-apiserver-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-controller-manager-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-proxy-sp2vs.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-proxy-sp2vs.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-scheduler-instance.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.txt",
                                ">f+++++++++ objects/namespaced/kube-system/pods/kube-vip-instance.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/rolebindings/",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/cilium-config-agent.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/cilium-config-agent.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubeadm-certs.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:kubelet-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/kubeadm:nodes-kubeadm-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::extension-apiserver-authentication-reader.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-controller-manager.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system::leader-locking-kube-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:bootstrap-signer.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:cloud-provider.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/rolebindings/system:controller:token-cleaner.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/roles/",
                                ">f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/cilium-config-agent.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/extension-apiserver-authentication-reader.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubeadm-certs.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:kubelet-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/kubeadm:nodes-kubeadm-config.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-controller-manager.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system::leader-locking-kube-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:bootstrap-signer.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:cloud-provider.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:token-cleaner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/roles/system:controller:token-cleaner.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/secrets/",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-592olp.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-592olp.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-vahv4y.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/bootstrap-token-vahv4y.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/kubeadm-certs.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.txt",
                                ">f+++++++++ objects/namespaced/kube-system/secrets/sh.helm.release.v1.cilium.v1.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/attachdetach-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/bootstrap-signer.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/certificate-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/certificate-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium-operator.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cilium.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/clusterrole-aggregation-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/coredns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/cronjob-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/daemon-set-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/deployment-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/disruption-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpoint-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslice-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/endpointslicemirroring-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ephemeral-volume-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/expand-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/expand-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/generic-garbage-collector.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/horizontal-pod-autoscaler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/job-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/namespace-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/node-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/persistent-volume-binder.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pod-garbage-collector.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pv-protection-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/pvc-protection-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replicaset-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replicaset-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/replication-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/resourcequota-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/root-ca-cert-publisher.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-account-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/service-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/statefulset-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/token-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-after-finished-controller.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-controller.txt",
                                ">f+++++++++ objects/namespaced/kube-system/serviceaccounts/ttl-controller.yaml",
                                "cd+++++++++ objects/namespaced/kube-system/services/",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-dns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-dns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-coredns.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-coredns.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-controller-manager.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-etcd.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-etcd.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-proxy.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-proxy.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-scheduler.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kube-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kubelet.txt",
                                ">f+++++++++ objects/namespaced/kube-system/services/kube-prometheus-stack-kubelet.yaml",
                                "cd+++++++++ objects/namespaced/local-path-storage/",
                                "cd+++++++++ objects/namespaced/local-path-storage/configmaps/",
                                ">f+++++++++ objects/namespaced/local-path-storage/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/local-path-storage/configmaps/kube-root-ca.crt.yaml",
                                ">f+++++++++ objects/namespaced/local-path-storage/configmaps/local-path-config.txt",
                                ">f+++++++++ objects/namespaced/local-path-storage/configmaps/local-path-config.yaml",
                                "cd+++++++++ objects/namespaced/local-path-storage/deployment/",
                                ">f+++++++++ objects/namespaced/local-path-storage/deployment/local-path-provisioner.txt",
                                ">f+++++++++ objects/namespaced/local-path-storage/deployment/local-path-provisioner.yaml",
                                "cd+++++++++ objects/namespaced/local-path-storage/pods/",
                                ">f+++++++++ objects/namespaced/local-path-storage/pods/local-path-provisioner-679c578f5-7h8w5.txt",
                                ">f+++++++++ objects/namespaced/local-path-storage/pods/local-path-provisioner-679c578f5-7h8w5.yaml",
                                "cd+++++++++ objects/namespaced/local-path-storage/secrets/",
                                ">f+++++++++ objects/namespaced/local-path-storage/secrets/sh.helm.release.v1.local-path-provisioner.v1.txt",
                                ">f+++++++++ objects/namespaced/local-path-storage/secrets/sh.helm.release.v1.local-path-provisioner.v1.yaml",
                                "cd+++++++++ objects/namespaced/local-path-storage/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/local-path-provisioner.txt",
                                ">f+++++++++ objects/namespaced/local-path-storage/serviceaccounts/local-path-provisioner.yaml",
                                "cd+++++++++ objects/namespaced/magnum-system/",
                                "cd+++++++++ objects/namespaced/magnum-system/configmaps/",
                                ">f+++++++++ objects/namespaced/magnum-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/magnum-system/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/magnum-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/magnum-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/magnum-system/serviceaccounts/default.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/",
                                "cd+++++++++ objects/namespaced/monitoring/configmaps/",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/goldpinger-zap.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/goldpinger-zap.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/ipmi-exporter.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/ipmi-exporter.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-alertmanager-overview.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-alertmanager-overview.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-apiserver.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-apiserver.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-cluster-total.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-cluster-total.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-controller-manager.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster-advanced.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster-advanced.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-ceph-cluster.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-goldpinger.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-goldpinger.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-haproxy.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-haproxy.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-host-details.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-host-details.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-hosts-overview.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-hosts-overview.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-node-exporter-full.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-node-exporter-full.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osd-device-details.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osd-device-details.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osds-overview.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-osds-overview.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-detail.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-detail.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-overview.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-pool-overview.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-details.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-details.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-overview.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-dashboard-rbd-overview.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-etcd.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-etcd.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-config-dashboards.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-config-dashboards.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-datasource.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-datasource.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-overview.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana-overview.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-coredns.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-coredns.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-cluster.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-cluster.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-multicluster.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-multicluster.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-namespace.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-namespace.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-node.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-node.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-pod.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-pod.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workload.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workload.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workloads-namespace.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-k8s-resources-workloads-namespace.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-kubelet.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-kubelet.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-pod.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-pod.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-workload.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-namespace-by-workload.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-cluster-rsrc-use.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-cluster-rsrc-use.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-exporter.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-exporter.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-rsrc-use.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-node-rsrc-use.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes-darwin.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes-darwin.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-nodes.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-persistentvolumesusage.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-persistentvolumesusage.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-pod-total.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-pod-total.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus-tls.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus-tls.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-prometheus.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-proxy.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-proxy.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-scheduler.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-workload-total.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-prometheus-stack-workload-total.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/kube-root-ca.crt.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki-alerting-rules.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki-alerting-rules.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki-gateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki-gateway.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki-runtime.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki-runtime.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/loki.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-master-conf.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-master-conf.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-topology-updater-conf.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-topology-updater-conf.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-worker-conf.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/node-feature-discovery-worker-conf.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/prometheus-kube-prometheus-stack-prometheus-rulefiles-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/prometheus-kube-prometheus-stack-prometheus-rulefiles-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/vector.txt",
                                ">f+++++++++ objects/namespaced/monitoring/configmaps/vector.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/daemonsets/",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/goldpinger.txt",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/goldpinger.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/ipmi-exporter.txt",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/ipmi-exporter.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/kube-prometheus-stack-prometheus-node-exporter.txt",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/kube-prometheus-stack-prometheus-node-exporter.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/node-feature-discovery-worker.txt",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/node-feature-discovery-worker.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/vector.txt",
                                ">f+++++++++ objects/namespaced/monitoring/daemonsets/vector.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/deployment/",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-kube-state-metrics.txt",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-kube-state-metrics.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-operator.txt",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/kube-prometheus-stack-operator.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/loki-gateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/loki-gateway.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-gc.txt",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-gc.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-master.txt",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/node-feature-discovery-master.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/prometheus-pushgateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/deployment/prometheus-pushgateway.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/endpoints/",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/alertmanager-operated.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/alertmanager-operated.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/goldpinger.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/goldpinger.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-alertmanager.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-alertmanager.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-kube-state-metrics.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-kube-state-metrics.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-operator.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-operator.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus-node-exporter.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus-node-exporter.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/kube-prometheus-stack-prometheus.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-chunks-cache.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-chunks-cache.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-gateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-gateway.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-headless.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-headless.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-memberlist.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-memberlist.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-results-cache.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki-results-cache.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/loki.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-operated.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-operated.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-pushgateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/prometheus-pushgateway.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/vector-headless.txt",
                                ">f+++++++++ objects/namespaced/monitoring/endpoints/vector-headless.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/ingresses/",
                                ">f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-alertmanager.txt",
                                ">f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-alertmanager.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/namespaced/monitoring/ingresses/kube-prometheus-stack-prometheus.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/",
                                ">f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/alertmanager-kube-prometheus-stack-alertmanager-db-alertmanager-kube-prometheus-stack-alertmanager-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/alertmanager-kube-prometheus-stack-alertmanager-db-alertmanager-kube-prometheus-stack-alertmanager-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/prometheus-kube-prometheus-stack-prometheus-db-prometheus-kube-prometheus-stack-prometheus-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/prometheus-kube-prometheus-stack-prometheus-db-prometheus-kube-prometheus-stack-prometheus-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/storage-loki-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/persistentvolumeclaims/storage-loki-0.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/pods/",
                                ">f+++++++++ objects/namespaced/monitoring/pods/alertmanager-kube-prometheus-stack-alertmanager-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/alertmanager-kube-prometheus-stack-alertmanager-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/goldpinger-7jzp8.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/goldpinger-7jzp8.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-grafana-668bfb9659-ft52b.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-grafana-668bfb9659-ft52b.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-operator-cd88cf4bf-lzh7g.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-operator-cd88cf4bf-lzh7g.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-prometheus-node-exporter-59qlm.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/kube-prometheus-stack-prometheus-node-exporter-59qlm.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-chunks-cache-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-chunks-cache-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-gateway-cf54cb88c-zv654.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-gateway-cf54cb88c-zv654.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-results-cache-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/loki-results-cache-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-gc-6675cbb6d9-zv9sn.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-gc-6675cbb6d9-zv9sn.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-master-8665476dbc-t4z5z.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-master-8665476dbc-t4z5z.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-worker-p8lmk.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/node-feature-discovery-worker-p8lmk.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/prometheus-kube-prometheus-stack-prometheus-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/prometheus-kube-prometheus-stack-prometheus-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/prometheus-pushgateway-7b8659c68b-28dht.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/prometheus-pushgateway-7b8659c68b-28dht.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/pods/vector-qzjms.txt",
                                ">f+++++++++ objects/namespaced/monitoring/pods/vector-qzjms.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/rolebindings/",
                                ">f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-pod-tls-sidecar.txt",
                                ">f+++++++++ objects/namespaced/monitoring/rolebindings/kube-prometheus-stack-pod-tls-sidecar.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/rolebindings/node-feature-discovery-worker.txt",
                                ">f+++++++++ objects/namespaced/monitoring/rolebindings/node-feature-discovery-worker.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/roles/",
                                ">f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-pod-tls-sidecar.txt",
                                ">f+++++++++ objects/namespaced/monitoring/roles/kube-prometheus-stack-pod-tls-sidecar.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/roles/node-feature-discovery-worker.txt",
                                ">f+++++++++ objects/namespaced/monitoring/roles/node-feature-discovery-worker.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/secrets/",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-generated.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-generated.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-tls-assets-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-tls-assets-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-web-config.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager-web-config.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-kube-prometheus-stack-alertmanager.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-tls.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/alertmanager-tls.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/grafana-tls.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/grafana-tls.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-admission.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-admission.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-client-secret.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-client-secret.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-cookie-secret.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-cookie-secret.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-oauth2-proxy.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-alertmanager-oauth2-proxy.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-etcd-client-cert.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-etcd-client-cert.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana-client-secret.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana-client-secret.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-client-secret.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-client-secret.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-cookie-secret.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-cookie-secret.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-node-exporter-59qlm-tls.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-node-exporter-59qlm-tls.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-oauth2-proxy.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/kube-prometheus-stack-prometheus-oauth2-proxy.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-0-tls.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-0-tls.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-tls-assets-0.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-tls-assets-0.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-web-config.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus-web-config.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-kube-prometheus-stack-prometheus.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-tls.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/prometheus-tls.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.goldpinger.v1.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.goldpinger.v1.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.kube-prometheus-stack.v1.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.kube-prometheus-stack.v1.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.loki.v1.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.loki.v1.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.node-feature-discovery.v1.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.node-feature-discovery.v1.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.prometheus-pushgateway.v1.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.prometheus-pushgateway.v1.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.vector.v1.txt",
                                ">f+++++++++ objects/namespaced/monitoring/secrets/sh.helm.release.v1.vector.v1.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/goldpinger.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/goldpinger.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-alertmanager.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-alertmanager.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-kube-state-metrics.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-kube-state-metrics.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-operator.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-operator.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus-node-exporter.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus-node-exporter.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/kube-prometheus-stack-prometheus.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/loki.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/loki.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-gc.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-gc.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-worker.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery-worker.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/node-feature-discovery.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/prometheus-pushgateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/prometheus-pushgateway.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/vector.txt",
                                ">f+++++++++ objects/namespaced/monitoring/serviceaccounts/vector.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/services/",
                                ">f+++++++++ objects/namespaced/monitoring/services/alertmanager-operated.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/alertmanager-operated.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/goldpinger.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/goldpinger.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-alertmanager.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-alertmanager.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-grafana.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-grafana.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-kube-state-metrics.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-kube-state-metrics.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-operator.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-operator.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus-node-exporter.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus-node-exporter.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/kube-prometheus-stack-prometheus.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-chunks-cache.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-chunks-cache.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-gateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-gateway.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-headless.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-headless.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-memberlist.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-memberlist.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-results-cache.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki-results-cache.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/loki.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/prometheus-operated.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/prometheus-operated.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/prometheus-pushgateway.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/prometheus-pushgateway.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/services/vector-headless.txt",
                                ">f+++++++++ objects/namespaced/monitoring/services/vector-headless.yaml",
                                "cd+++++++++ objects/namespaced/monitoring/statefulsets/",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/alertmanager-kube-prometheus-stack-alertmanager.txt",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/alertmanager-kube-prometheus-stack-alertmanager.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/loki-chunks-cache.txt",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/loki-chunks-cache.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/loki-results-cache.txt",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/loki-results-cache.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/loki.txt",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/loki.yaml",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/prometheus-kube-prometheus-stack-prometheus.txt",
                                ">f+++++++++ objects/namespaced/monitoring/statefulsets/prometheus-kube-prometheus-stack-prometheus.yaml",
                                "cd+++++++++ objects/namespaced/openstack/",
                                "cd+++++++++ objects/namespaced/openstack/configmaps/",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/barbican-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/barbican-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/ceph-csi-config.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/ceph-csi-config.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/ceph-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/ceph-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/cinder-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/cinder-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/glance-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/glance-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/heat-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/heat-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/horizon-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/horizon-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/keepalived-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/keepalived-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/keystone-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/keystone-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/keystone-openid-metadata.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/keystone-openid-metadata.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/kube-root-ca.crt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/libvirt-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/libvirt-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/libvirt-libvirt-default-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/libvirt-libvirt-default-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/magnum-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/magnum-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/magnum-cluster-api-proxy-config.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/magnum-cluster-api-proxy-config.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/manila-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/manila-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/memcached-memcached-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/memcached-memcached-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/neutron-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/neutron-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/nova-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/nova-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/octavia-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/octavia-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/openvswitch-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/openvswitch-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/ovn-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/ovn-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-haproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-haproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-pxc.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/percona-xtradb-pxc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/placement-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/placement-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-barbican-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-cinder-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-glance-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-glance-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-glance-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-glance-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-heat-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-keystone-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-magnum-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-manila-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-neutron-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-nova-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-nova-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-nova-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-nova-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-plugins-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-plugins-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-server-conf.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rabbitmq-octavia-server-conf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-mon-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-mon-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-pdbstatemap.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-pdbstatemap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-rgw-ceph-mime-types.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-ceph-rgw-ceph-mime-types.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-config-override.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/rook-config-override.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/staffeln-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/staffeln-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/tempest-bin.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/tempest-bin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/valkey-configuration.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/valkey-configuration.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/valkey-health.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/valkey-health.yaml",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/valkey-scripts.txt",
                                ">f+++++++++ objects/namespaced/openstack/configmaps/valkey-scripts.yaml",
                                "cd+++++++++ objects/namespaced/openstack/cronjobs/",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/cinder-volume-usage-audit.txt",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/cinder-volume-usage-audit.yaml",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/heat-engine-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/heat-engine-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/heat-purge-deleted.txt",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/heat-purge-deleted.yaml",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/keystone-credential-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/keystone-credential-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/keystone-fernet-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/keystone-fernet-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/nova-cell-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/nova-cell-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/nova-service-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/cronjobs/nova-service-cleaner.yaml",
                                "cd+++++++++ objects/namespaced/openstack/daemonsets/",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/keepalived.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/keepalived.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/libvirt-libvirt-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/libvirt-libvirt-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/magnum-cluster-api-proxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/magnum-cluster-api-proxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/neutron-netns-cleanup-cron-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/neutron-netns-cleanup-cron-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/neutron-ovn-metadata-agent-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/neutron-ovn-metadata-agent-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/nova-compute-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/nova-compute-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/octavia-health-manager-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/octavia-health-manager-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/openvswitch.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/openvswitch.yaml",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/ovn-controller.txt",
                                ">f+++++++++ objects/namespaced/openstack/daemonsets/ovn-controller.yaml",
                                "cd+++++++++ objects/namespaced/openstack/deployment/",
                                ">f+++++++++ objects/namespaced/openstack/deployment/barbican-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/barbican-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-backup.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-backup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-volume.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/cinder-volume.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/glance-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/glance-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/heat-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/heat-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/heat-cfn.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/heat-cfn.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/heat-engine.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/heat-engine.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/horizon.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/horizon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/keystone-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/keystone-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/magnum-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/magnum-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/magnum-registry.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/magnum-registry.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-data.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-data.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-share.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/manila-share.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/memcached-memcached.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/memcached-memcached.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-api-metadata.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-api-metadata.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-api-osapi.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-api-osapi.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-novncproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-novncproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/nova-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/octavia-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/octavia-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/octavia-housekeeping.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/octavia-housekeeping.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/octavia-worker.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/octavia-worker.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/openstack-database-exporter.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/openstack-database-exporter.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/openstack-exporter.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/openstack-exporter.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/ovn-northd.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/ovn-northd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/placement-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/placement-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/pxc-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/pxc-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-cluster-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-cluster-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-messaging-topology-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rabbitmq-messaging-topology-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rook-ceph-crashcollector-instance.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rook-ceph-crashcollector-instance.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rook-ceph-rgw-ceph-a.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/rook-ceph-rgw-ceph-a.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/staffeln-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/staffeln-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/deployment/staffeln-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/deployment/staffeln-conductor.yaml",
                                "cd+++++++++ objects/namespaced/openstack/endpoints/",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/barbican-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/barbican-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/ceph-mon.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/ceph-mon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/cinder-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/cinder-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/glance-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/glance-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/heat-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/heat-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/heat-cfn.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/heat-cfn.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/horizon-int.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/horizon-int.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/horizon.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/horizon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/keystone-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/keystone-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/magnum-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/magnum-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/magnum-registry.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/magnum-registry.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/manila-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/manila-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/memcached-metrics.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/memcached-metrics.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/memcached.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/memcached.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/nova-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/nova-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/nova-metadata.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/nova-metadata.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/nova-novncproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/nova-novncproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/octavia-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/octavia-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/openstack-exporter.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/openstack-exporter.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-nb.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-nb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-sb.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/ovn-ovsdb-sb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-cluster-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-cluster-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-metrics.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-metrics.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-replicas.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy-replicas.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-haproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc-unready.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc-unready.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/percona-xtradb-pxc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/placement-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/placement-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-barbican.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-cinder.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-glance.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-heat-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-heat-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-heat.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-heat.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-keystone.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-magnum.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-manila.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-messaging-topology-operator-webhook.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-messaging-topology-operator-webhook.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-neutron.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-nova.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-octavia-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-octavia-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-octavia.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rabbitmq-octavia.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rook-ceph-rgw-ceph.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/rook-ceph-rgw-ceph.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/staffeln-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/staffeln-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/valkey-headless.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/valkey-headless.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/valkey-metrics.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/valkey-metrics.yaml",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/valkey.txt",
                                ">f+++++++++ objects/namespaced/openstack/endpoints/valkey.yaml",
                                "cd+++++++++ objects/namespaced/openstack/ingresses/",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/cloudformation.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/cloudformation.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/compute-novnc-proxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/compute-novnc-proxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/compute.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/compute.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/container-infra-registry.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/container-infra-registry.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/container-infra.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/container-infra.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/dashboard.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/dashboard.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/identity.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/identity.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/image.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/image.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/key-manager.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/key-manager.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/load-balancer.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/load-balancer.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/network.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/network.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/orchestration.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/orchestration.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/placement.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/placement.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/rook-ceph-cluster.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/rook-ceph-cluster.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/sharev2.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/sharev2.yaml",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/volumev3.txt",
                                ">f+++++++++ objects/namespaced/openstack/ingresses/volumev3.yaml",
                                "cd+++++++++ objects/namespaced/openstack/jobs/",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/barbican-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-backup-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-backup-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-create-internal-tenant.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-create-internal-tenant.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-volume-usage-audit-29540045.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/cinder-volume-usage-audit-29540045.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-metadefs-load.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-metadefs-load.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/glance-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-domain-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-domain-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540055.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540055.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540060.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540060.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540065.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-engine-cleaner-29540065.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-trusts.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/heat-trusts.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/horizon-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/horizon-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/horizon-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/horizon-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-credential-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-credential-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-domain-manage.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-domain-manage.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-fernet-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-fernet-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/keystone-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-domain-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-domain-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/magnum-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/manila-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/neutron-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-cell-setup-29540040.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-cell-setup-29540040.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-service-cleaner-29540040.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/nova-service-cleaner-29540040.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/octavia-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/placement-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/staffeln-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/tempest-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/tempest-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/jobs/tempest-run-tests.txt",
                                ">f+++++++++ objects/namespaced/openstack/jobs/tempest-run-tests.yaml",
                                "cd+++++++++ objects/namespaced/openstack/networkpolicies/",
                                ">f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-cluster-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-cluster-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-messaging-topology-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/networkpolicies/rabbitmq-messaging-topology-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/networkpolicies/valkey.txt",
                                ">f+++++++++ objects/namespaced/openstack/networkpolicies/valkey.yaml",
                                "cd+++++++++ objects/namespaced/openstack/persistentvolumeclaims/",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-nb-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-nb-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-sb-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/data-ovn-ovsdb-sb-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/datadir-percona-xtradb-pxc-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/datadir-percona-xtradb-pxc-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-barbican-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-barbican-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-cinder-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-cinder-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-glance-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-glance-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-heat-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-heat-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-keystone-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-keystone-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-magnum-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-magnum-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-manila-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-manila-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-neutron-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-neutron-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-nova-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-nova-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-octavia-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/persistence-rabbitmq-octavia-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/valkey-data-valkey-node-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/persistentvolumeclaims/valkey-data-valkey-node-0.yaml",
                                "cd+++++++++ objects/namespaced/openstack/pods/",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-api-775987496d-z6jqv.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-api-775987496d-z6jqv.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-db-init-nm8k6.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-db-init-nm8k6.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-db-sync-452x5.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-db-sync-452x5.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-ks-endpoints-w2ffg.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-ks-endpoints-w2ffg.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-ks-service-8pm7j.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-ks-service-8pm7j.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-ks-user-fszfr.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-ks-user-fszfr.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-rabbit-init-j5qmd.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/barbican-rabbit-init-j5qmd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-api-86d7694f66-j97gj.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-api-86d7694f66-j97gj.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-backup-dcfd7dfb7-sdwkc.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-backup-dcfd7dfb7-sdwkc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-backup-storage-init-zmnkh.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-backup-storage-init-zmnkh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-bootstrap-wng86.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-bootstrap-wng86.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-create-internal-tenant-6vgll.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-create-internal-tenant-6vgll.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-db-init-mzm5b.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-db-init-mzm5b.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-db-sync-mz6ls.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-db-sync-mz6ls.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-ks-endpoints-xv2tb.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-ks-endpoints-xv2tb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-ks-service-dlcxz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-ks-service-dlcxz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-ks-user-5bd5g.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-ks-user-5bd5g.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-rabbit-init-l4fpm.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-rabbit-init-l4fpm.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-scheduler-586f444995-p7grf.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-scheduler-586f444995-p7grf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-storage-init-vt6br.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-storage-init-vt6br.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-volume-66dc847979-qgp4l.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-volume-66dc847979-qgp4l.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-volume-usage-audit-29540045-jbmvh.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/cinder-volume-usage-audit-29540045-jbmvh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-api-65d579bfc8-6x76l.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-api-65d579bfc8-6x76l.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-db-init-wbpff.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-db-init-wbpff.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-db-sync-gk84f.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-db-sync-gk84f.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-ks-endpoints-dq2cc.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-ks-endpoints-dq2cc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-ks-service-5h6bw.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-ks-service-5h6bw.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-ks-user-lcfxr.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-ks-user-lcfxr.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-metadefs-load-476tp.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-metadefs-load-476tp.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-rabbit-init-c6rjt.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-rabbit-init-c6rjt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-storage-init-hdcpc.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/glance-storage-init-hdcpc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-api-6d65f9477-kmbkl.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-api-6d65f9477-kmbkl.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-bootstrap-9dwg2.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-bootstrap-9dwg2.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-cfn-f44db7787-t8f7m.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-cfn-f44db7787-t8f7m.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-db-init-fk8qw.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-db-init-fk8qw.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-db-sync-cxmcb.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-db-sync-cxmcb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-domain-ks-user-tq2c5.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-domain-ks-user-tq2c5.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-64f8b77bfb-wngkr.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-64f8b77bfb-wngkr.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540055-p9pq9.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540055-p9pq9.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540060-z4g95.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540060-z4g95.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540065-rcjr2.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-engine-cleaner-29540065-rcjr2.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-ks-endpoints-wwzbz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-ks-endpoints-wwzbz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-ks-service-8pxqz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-ks-service-8pxqz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-ks-user-tfk98.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-ks-user-tfk98.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-rabbit-init-rbl9n.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-rabbit-init-rbl9n.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-trusts-czrrv.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/heat-trusts-czrrv.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/horizon-8cdd7b888-bvzvx.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/horizon-8cdd7b888-bvzvx.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/horizon-db-init-s5pbw.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/horizon-db-init-s5pbw.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/horizon-db-sync-bgr2g.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/horizon-db-sync-bgr2g.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keepalived-7jdfz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keepalived-7jdfz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-api-c4656754c-mqbxm.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-api-c4656754c-mqbxm.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-bootstrap-mdtrx.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-bootstrap-mdtrx.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-credential-setup-6xsvx.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-credential-setup-6xsvx.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-db-init-z5mwz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-db-init-z5mwz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-db-sync-zsq8z.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-db-sync-zsq8z.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-domain-manage-v865d.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-domain-manage-v865d.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-fernet-setup-5rfqs.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-fernet-setup-5rfqs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-rabbit-init-m44qz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/keystone-rabbit-init-m44qz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/libvirt-libvirt-default-6bgrg.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/libvirt-libvirt-default-6bgrg.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-api-8549df7884-9b2zc.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-api-8549df7884-9b2zc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-cluster-api-proxy-z2flh.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-cluster-api-proxy-z2flh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-conductor-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-conductor-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-db-init-dshrc.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-db-init-dshrc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-db-sync-8ttpk.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-db-sync-8ttpk.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-domain-ks-user-vp8f2.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-domain-ks-user-vp8f2.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-ks-endpoints-jvzvf.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-ks-endpoints-jvzvf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-ks-service-vdn67.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-ks-service-vdn67.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-ks-user-4wvtj.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-ks-user-4wvtj.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-rabbit-init-w7jc7.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-rabbit-init-w7jc7.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-registry-c45778976-2zz96.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/magnum-registry-c45778976-2zz96.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-api-5cdf958bd9-hmbmb.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-api-5cdf958bd9-hmbmb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-bootstrap-5wn97.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-bootstrap-5wn97.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-data-75cbc955bd-27jjw.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-data-75cbc955bd-27jjw.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-db-init-pbdm8.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-db-init-pbdm8.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-db-sync-rm9mz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-db-sync-rm9mz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-ks-endpoints-d8nr9.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-ks-endpoints-d8nr9.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-ks-service-g7svt.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-ks-service-g7svt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-ks-user-pr9mg.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-ks-user-pr9mg.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-rabbit-init-74vjs.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-rabbit-init-74vjs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-scheduler-5b584c8656-mmnnd.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-scheduler-5b584c8656-mmnnd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-share-68879775b-rc6q9.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/manila-share-68879775b-rc6q9.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/memcached-memcached-6479589586-9sxjx.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/memcached-memcached-6479589586-9sxjx.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-db-init-l7c9v.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-db-init-l7c9v.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-db-sync-brwb5.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-db-sync-brwb5.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ks-endpoints-dstkg.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ks-endpoints-dstkg.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ks-service-sq4tp.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ks-service-sq4tp.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ks-user-kcfc4.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ks-user-kcfc4.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-netns-cleanup-cron-default-8frwf.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-netns-cleanup-cron-default-8frwf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ovn-metadata-agent-default-flhb5.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-ovn-metadata-agent-default-flhb5.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-rabbit-init-rdnbf.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-rabbit-init-rdnbf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-server-649c5974f6-5dkvl.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/neutron-server-649c5974f6-5dkvl.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-api-metadata-546d94ddd7-btnrc.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-api-metadata-546d94ddd7-btnrc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-api-osapi-99c7b7cd8-2lnzr.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-api-osapi-99c7b7cd8-2lnzr.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-bootstrap-trzqq.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-bootstrap-trzqq.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-29540040-rtzd7.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-29540040-rtzd7.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-j97qh.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-cell-setup-j97qh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-compute-default-2v5pd.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-compute-default-2v5pd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-conductor-5474cb4b8d-bxzhq.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-conductor-5474cb4b8d-bxzhq.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-db-init-b4sqh.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-db-init-b4sqh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-db-sync-2rbjc.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-db-sync-2rbjc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-ks-endpoints-zwcm6.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-ks-endpoints-zwcm6.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-ks-service-fmj77.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-ks-service-fmj77.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-ks-user-t8xgz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-ks-user-t8xgz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-novncproxy-85dd5b5965-z6hmj.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-novncproxy-85dd5b5965-z6hmj.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-rabbit-init-szpvx.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-rabbit-init-szpvx.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-scheduler-78775555d4-hb2j9.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-scheduler-78775555d4-hb2j9.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-service-cleaner-29540040-cxdd4.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/nova-service-cleaner-29540040-cxdd4.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-api-75db6578cf-m656r.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-api-75db6578cf-m656r.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-bootstrap-kwfv2.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-bootstrap-kwfv2.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-db-init-wnz5h.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-db-init-wnz5h.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-db-sync-rjq45.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-db-sync-rjq45.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-health-manager-default-twmks.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-health-manager-default-twmks.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-housekeeping-87b98c47b-vqwct.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-housekeeping-87b98c47b-vqwct.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-ks-endpoints-jdlzw.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-ks-endpoints-jdlzw.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-ks-service-rkdp9.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-ks-service-rkdp9.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-ks-user-tjl52.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-ks-user-tjl52.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-rabbit-init-vdqxf.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-rabbit-init-vdqxf.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-worker-774cddbcdc-qxl6k.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/octavia-worker-774cddbcdc-qxl6k.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/openstack-database-exporter-7c944bc9f-w2bdb.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/openstack-database-exporter-7c944bc9f-w2bdb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/openstack-exporter-74676fb4b4-jrkwh.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/openstack-exporter-74676fb4b4-jrkwh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/openvswitch-gj98d.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/openvswitch-gj98d.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-controller-6mbd4.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-controller-6mbd4.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-northd-6c6687ddd6-7grhs.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-northd-6c6687ddd6-7grhs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-nb-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-nb-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-sb-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/ovn-ovsdb-sb-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-haproxy-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-haproxy-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-pxc-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/percona-xtradb-pxc-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-api-75695696c6-brsxj.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-api-75695696c6-brsxj.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-db-init-89t92.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-db-init-89t92.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-db-sync-nvqjv.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-db-sync-nvqjv.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-ks-endpoints-jmfl7.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-ks-endpoints-jmfl7.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-ks-service-qdjdz.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-ks-service-qdjdz.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-ks-user-blkn9.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/placement-ks-user-blkn9.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/pxc-operator-7cff949c8b-7zp4j.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/pxc-operator-7cff949c8b-7zp4j.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-barbican-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-barbican-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cinder-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cinder-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cluster-operator-5448d56d95-vk9km.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-cluster-operator-5448d56d95-vk9km.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-glance-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-glance-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-heat-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-heat-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-keystone-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-keystone-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-magnum-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-magnum-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-manila-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-manila-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-messaging-topology-operator-7f8596f788-84l9x.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-messaging-topology-operator-7f8596f788-84l9x.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-neutron-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-neutron-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-nova-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-nova-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-octavia-server-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rabbitmq-octavia-server-0.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rook-ceph-crashcollector-instance-754c646bfd-htxl9.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rook-ceph-crashcollector-instance-754c646bfd-htxl9.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-api-6669c8779f-qgp4c.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-api-6669c8779f-qgp4c.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-conductor-7b5d99bcd4-ws4sl.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-conductor-7b5d99bcd4-ws4sl.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-db-init-p4pq4.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-db-init-p4pq4.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-db-sync-khzx8.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/staffeln-db-sync-khzx8.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/tempest-ks-user-kwbf6.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/tempest-ks-user-kwbf6.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/tempest-run-tests-g5plh.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/tempest-run-tests-g5plh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/pods/valkey-node-0.txt",
                                ">f+++++++++ objects/namespaced/openstack/pods/valkey-node-0.yaml",
                                "cd+++++++++ objects/namespaced/openstack/rolebindings/",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/barbican-barbican-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-backup-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-backup-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-backup.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-backup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-create-internal-tenant.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-create-internal-tenant.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume-usage-audit.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume-usage-audit.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-cinder-volume.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/cinder-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-metadefs-load.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-metadefs-load.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-glance-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/glance-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-cfn.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-cfn.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-engine.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-user-domain.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-user-domain.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-purge-deleted.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-purge-deleted.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-trusts.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/heat-heat-trusts.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/horizon-horizon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keepalived.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keepalived.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-credential-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-credential-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-credential-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-credential-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-fernet-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-credential-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-credential-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-domain-manage.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-domain-manage.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-fernet-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-fernet-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/keystone-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/libvirt-cert-manager.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/libvirt-cert-manager.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/libvirt-libvirt.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/libvirt-libvirt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user-domain.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user-domain.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/magnum-magnum-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-data.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-data.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-share.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/manila-manila-share.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ovn-metadata-agent.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-ovn-metadata-agent.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/neutron-neutron-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-metadata.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-metadata.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-osapi.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-api-osapi.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup-cron.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup-cron.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-cell-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-compute.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-compute.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-novncproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-novncproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-service-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-service-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/nova-nova-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-health-manager.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-health-manager.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-housekeeping.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-housekeeping.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-worker.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/octavia-octavia-worker.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-controller.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-controller.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-northd.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-northd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-controller.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-controller.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-northd.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovn-northd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovsdb.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/ovn-ovsdb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/placement-placement-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/pxc-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/pxc-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-barbican-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-barbican-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cinder-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cinder-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cluster-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-cluster-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-glance-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-glance-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-heat-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-heat-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-keystone-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-keystone-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-magnum-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-magnum-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-manila-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-manila-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-messaging-topology-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-messaging-topology-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-nova-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-nova-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-octavia-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rabbitmq-octavia-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cluster-mgmt.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cluster-mgmt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cmd-reporter.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-cmd-reporter.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-mgr.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-mgr.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-osd.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-purge-osd.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/rook-ceph-purge-osd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-conductor-leases.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-conductor-leases.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/staffeln-staffeln-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-run-tests.txt",
                                ">f+++++++++ objects/namespaced/openstack/rolebindings/tempest-tempest-run-tests.yaml",
                                "cd+++++++++ objects/namespaced/openstack/roles/",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/barbican-openstack-barbican-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-backup-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-backup-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-backup.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-backup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-create-internal-tenant.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-create-internal-tenant.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume-usage-audit.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume-usage-audit.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-openstack-cinder-volume.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/cinder-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-metadefs-load.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-metadefs-load.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-openstack-glance-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/glance-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-cfn.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-cfn.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-engine.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user-domain.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user-domain.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-purge-deleted.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-purge-deleted.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-trusts.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/heat-openstack-heat-trusts.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/horizon-openstack-horizon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keepalived.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keepalived.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-credential-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-credential-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-credential-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-credential-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-fernet-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-credential-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-credential-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-domain-manage.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-domain-manage.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-fernet-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-fernet-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/keystone-openstack-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/libvirt-cert-manager.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/libvirt-cert-manager.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/libvirt-openstack-libvirt.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/libvirt-openstack-libvirt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user-domain.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user-domain.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/magnum-openstack-magnum-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-data.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-data.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-share.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/manila-openstack-manila-share.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ovn-metadata-agent.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-ovn-metadata-agent.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/neutron-openstack-neutron-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-metadata.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-metadata.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-osapi.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-api-osapi.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup-cron.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup-cron.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-cell-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-compute.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-compute.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-novncproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-novncproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-service-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-service-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/nova-openstack-nova-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-health-manager.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-health-manager.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-housekeeping.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-housekeeping.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-worker.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/octavia-openstack-octavia-worker.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-controller.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-controller.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-northd.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-northd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-controller.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-controller.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-northd.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-openstack-ovn-northd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-ovsdb.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/ovn-ovsdb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/placement-openstack-placement-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/pxc-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/pxc-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-barbican-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-barbican-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cinder-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cinder-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cluster-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-cluster-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-glance-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-glance-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-heat-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-heat-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-keystone-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-keystone-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-magnum-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-magnum-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-manila-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-manila-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-messaging-topology-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-messaging-topology-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-neutron-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-neutron-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-nova-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-nova-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-octavia-peer-discovery.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rabbitmq-octavia-peer-discovery.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-cmd-reporter.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-cmd-reporter.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-mgr.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-mgr.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-osd.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-purge-osd.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/rook-ceph-purge-osd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/staffeln-openstack-staffeln-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-run-tests.txt",
                                ">f+++++++++ objects/namespaced/openstack/roles/tempest-openstack-tempest-run-tests.yaml",
                                "cd+++++++++ objects/namespaced/openstack/secrets/",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/barbican-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-backup-rbd-keyring.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-backup-rbd-keyring.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-volume-rbd-keyring.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/cinder-volume-rbd-keyring.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/glance-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-cfn-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-cfn-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-stack-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-stack-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-trustee.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-trustee.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/heat-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-int-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-int-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/horizon-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/images-rbd-keyring.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/images-rbd-keyring.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/internal-percona-xtradb.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/internal-percona-xtradb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keepalived-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keepalived-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-credential-keys.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-credential-keys.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-fernet-keys.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-fernet-keys.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/keystone-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-api-ca.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-api-ca.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-vnc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-6bgrg-vnc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-libvirt-default-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-vnc-ca.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/libvirt-vnc-ca.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-stack-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-stack-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-registry-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/magnum-registry-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-ssh-keys.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/manila-ssh-keys.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-netns-cleanup-cron-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-netns-cleanup-cron-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-ovn-metadata-agent-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-ovn-metadata-agent-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-server-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/neutron-server-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-compute-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-compute-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-api-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-cell0-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-vencrypt.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-novncproxy-vencrypt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-ssh.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/nova-ssh.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-amphora-ssh-key.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-amphora-ssh-key.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-client-ca.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-client-ca.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-client-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-client-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-health-manager-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-health-manager-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-persistence-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-rabbitmq-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-server-ca.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/octavia-server-ca.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/openstack-database-exporter-dsn.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/openstack-database-exporter-dsn.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/ovn-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/ovn-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/ovn-vector-config.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/ovn-vector-config.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/percona-xtradb.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/percona-xtradb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-api-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-api-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/placement-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/pvc-ceph-client-key.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/pvc-ceph-client-key.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-barbican-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-cinder-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-cinder-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-cinder-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-cinder-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-glance-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-heat-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-keystone-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-magnum-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-manila-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-messaging-topology-operator-webhook.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-messaging-topology-operator-webhook.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-neutron-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-neutron-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-neutron-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-neutron-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-nova-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-default-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-default-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-erlang-cookie.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rabbitmq-octavia-erlang-cookie.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-config.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-config.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-crash-collector-keyring.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-crash-collector-keyring.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-mon.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-mon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-object-user-ceph-cosi.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-object-user-ceph-cosi.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-a-keyring.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-a-keyring.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-ceph-rgw-ceph-certs.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-cephfs-node.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-cephfs-node.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-cephfs-provisioner.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-cephfs-provisioner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-node.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-node.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-provisioner.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/rook-csi-rbd-provisioner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.barbican.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.barbican.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph-provisioners.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph-provisioners.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ceph.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.cinder.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.cinder.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.glance.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.glance.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.heat.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.heat.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.horizon.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.horizon.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.keystone.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.keystone.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.libvirt.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.libvirt.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.magnum.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.magnum.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.manila.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.manila.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.memcached.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.memcached.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.neutron.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.neutron.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.nova.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.nova.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.octavia.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.octavia.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.openvswitch.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.openvswitch.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ovn.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.ovn.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.placement.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.placement.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.pxc-operator.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.pxc-operator.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.rabbitmq-cluster-operator.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.rabbitmq-cluster-operator.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.staffeln.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.staffeln.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.tempest.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.tempest.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.valkey.v1.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/sh.helm.release.v1.valkey.v1.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/staffeln-db-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/staffeln-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/staffeln-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/tempest-etc.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/tempest-etc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-admin.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-admin.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/tempest-keystone-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/valkey-ca.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/valkey-ca.yaml",
                                ">f+++++++++ objects/namespaced/openstack/secrets/valkey-server-certs.txt",
                                ">f+++++++++ objects/namespaced/openstack/secrets/valkey-server-certs.yaml",
                                "cd+++++++++ objects/namespaced/openstack/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/barbican-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-backup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-create-internal-tenant.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-create-internal-tenant.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume-usage-audit.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume-usage-audit.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/cinder-volume.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-metadefs-load.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-metadefs-load.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-storage-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-storage-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/glance-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-cfn.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-cfn.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-engine.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user-domain.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user-domain.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-purge-deleted.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-purge-deleted.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-trusts.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/heat-trusts.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/horizon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keepalived.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keepalived.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-credential-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-domain-manage.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-domain-manage.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-fernet-rotate.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-fernet-rotate.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-fernet-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-fernet-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/keystone-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/libvirt.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/libvirt.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user-domain.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user-domain.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/magnum-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-data.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-data.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-share.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/manila-share.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/memcached-memcached.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/memcached-memcached.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-netns-cleanup-cron.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-netns-cleanup-cron.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ovn-metadata-agent.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-ovn-metadata-agent.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/neutron-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-api-metadata.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-api-metadata.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-api-osapi.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-api-osapi.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup-cron.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup-cron.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-cell-setup.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-compute.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-compute.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-novncproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-novncproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-scheduler.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-scheduler.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-service-cleaner.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-service-cleaner.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-test.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/nova-test.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-bootstrap.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-bootstrap.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-health-manager.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-health-manager.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-housekeeping.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-housekeeping.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-rabbit-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-rabbit-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-worker.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/octavia-worker.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/openvswitch-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/openvswitch-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-controller.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-controller.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-northd.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-northd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-nb.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-nb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-sb.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/ovn-ovsdb-sb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-endpoints.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-endpoints.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-service.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-service.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/placement-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/pxc-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/pxc-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-barbican-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-barbican-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cinder-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cinder-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cluster-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-cluster-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-glance-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-glance-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-heat-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-heat-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-keystone-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-keystone-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-magnum-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-magnum-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-manila-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-manila-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-messaging-topology-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-messaging-topology-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-nova-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-nova-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-octavia-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rabbitmq-octavia-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-cmd-reporter.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-cmd-reporter.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-default.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-default.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-mgr.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-mgr.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-osd.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-purge-osd.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-purge-osd.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-rgw.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/rook-ceph-rgw.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-init.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-init.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-sync.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/staffeln-db-sync.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/tempest-ks-user.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/tempest-ks-user.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/tempest-run-tests.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/tempest-run-tests.yaml",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/valkey.txt",
                                ">f+++++++++ objects/namespaced/openstack/serviceaccounts/valkey.yaml",
                                "cd+++++++++ objects/namespaced/openstack/services/",
                                ">f+++++++++ objects/namespaced/openstack/services/barbican-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/barbican-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/ceph-mon.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/ceph-mon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/cinder-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/cinder-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/glance-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/glance-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/heat-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/heat-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/heat-cfn.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/heat-cfn.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/horizon-int.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/horizon-int.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/horizon.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/horizon.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/keystone-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/keystone-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/magnum-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/magnum-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/magnum-registry.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/magnum-registry.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/manila-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/manila-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/memcached-metrics.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/memcached-metrics.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/memcached.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/memcached.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/nova-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/nova-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/nova-metadata.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/nova-metadata.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/nova-novncproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/nova-novncproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/octavia-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/octavia-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/openstack-exporter.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/openstack-exporter.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-nb.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-nb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-sb.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/ovn-ovsdb-sb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-cluster-operator.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-cluster-operator.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-metrics.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-metrics.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-replicas.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy-replicas.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-haproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc-unready.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc-unready.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/percona-xtradb-pxc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/placement-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/placement-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-barbican.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-cinder.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-glance.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-heat.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-keystone-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-keystone-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-keystone.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-keystone.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-magnum.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-manila.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-messaging-topology-operator-webhook.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-messaging-topology-operator-webhook.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-neutron.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-nova.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia-nodes.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia-nodes.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rabbitmq-octavia.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/rook-ceph-rgw-ceph.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/rook-ceph-rgw-ceph.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/staffeln-api.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/staffeln-api.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/valkey-headless.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/valkey-headless.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/valkey-metrics.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/valkey-metrics.yaml",
                                ">f+++++++++ objects/namespaced/openstack/services/valkey.txt",
                                ">f+++++++++ objects/namespaced/openstack/services/valkey.yaml",
                                "cd+++++++++ objects/namespaced/openstack/statefulsets/",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/magnum-conductor.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/magnum-conductor.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-nb.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-nb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-sb.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/ovn-ovsdb-sb.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-haproxy.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-haproxy.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-pxc.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/percona-xtradb-pxc.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-barbican-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-barbican-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-cinder-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-cinder-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-glance-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-glance-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-heat-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-heat-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-keystone-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-keystone-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-magnum-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-magnum-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-manila-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-manila-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-neutron-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-neutron-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-nova-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-nova-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-octavia-server.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/rabbitmq-octavia-server.yaml",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/valkey-node.txt",
                                ">f+++++++++ objects/namespaced/openstack/statefulsets/valkey-node.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/",
                                "cd+++++++++ objects/namespaced/orc-system/configmaps/",
                                ">f+++++++++ objects/namespaced/orc-system/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/orc-system/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/deployment/",
                                ">f+++++++++ objects/namespaced/orc-system/deployment/orc-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/orc-system/deployment/orc-controller-manager.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/endpoints/",
                                ">f+++++++++ objects/namespaced/orc-system/endpoints/orc-controller-manager-metrics-service.txt",
                                ">f+++++++++ objects/namespaced/orc-system/endpoints/orc-controller-manager-metrics-service.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/pods/",
                                ">f+++++++++ objects/namespaced/orc-system/pods/orc-controller-manager-6cb597b5d4-glhcz.txt",
                                ">f+++++++++ objects/namespaced/orc-system/pods/orc-controller-manager-6cb597b5d4-glhcz.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/rolebindings/",
                                ">f+++++++++ objects/namespaced/orc-system/rolebindings/orc-leader-election-rolebinding.txt",
                                ">f+++++++++ objects/namespaced/orc-system/rolebindings/orc-leader-election-rolebinding.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/roles/",
                                ">f+++++++++ objects/namespaced/orc-system/roles/orc-leader-election-role.txt",
                                ">f+++++++++ objects/namespaced/orc-system/roles/orc-leader-election-role.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/orc-system/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/orc-system/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/orc-system/serviceaccounts/orc-controller-manager.txt",
                                ">f+++++++++ objects/namespaced/orc-system/serviceaccounts/orc-controller-manager.yaml",
                                "cd+++++++++ objects/namespaced/orc-system/services/",
                                ">f+++++++++ objects/namespaced/orc-system/services/orc-controller-manager-metrics-service.txt",
                                ">f+++++++++ objects/namespaced/orc-system/services/orc-controller-manager-metrics-service.yaml",
                                "cd+++++++++ objects/namespaced/rook-ceph/",
                                "cd+++++++++ objects/namespaced/rook-ceph/configmaps/",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/kube-root-ca.crt.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-config.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-config.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-mapping-config.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-csi-mapping-config.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-operator-config.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/configmaps/rook-ceph-operator-config.yaml",
                                "cd+++++++++ objects/namespaced/rook-ceph/deployment/",
                                ">f+++++++++ objects/namespaced/rook-ceph/deployment/rook-ceph-operator.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/deployment/rook-ceph-operator.yaml",
                                "cd+++++++++ objects/namespaced/rook-ceph/pods/",
                                ">f+++++++++ objects/namespaced/rook-ceph/pods/rook-ceph-operator-7b66cfb94c-tj94j.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/pods/rook-ceph-operator-7b66cfb94c-tj94j.yaml",
                                "cd+++++++++ objects/namespaced/rook-ceph/rolebindings/",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/cephfs-csi-provisioner-role-cfg.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/cephfs-csi-provisioner-role-cfg.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rbd-csi-provisioner-role-cfg.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rbd-csi-provisioner-role-cfg.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cluster-mgmt.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cluster-mgmt.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cmd-reporter.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-cmd-reporter.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system-openstack.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system-openstack.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr-system.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-mgr.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-osd.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-purge-osd.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-purge-osd.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-system.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/rolebindings/rook-ceph-system.yaml",
                                "cd+++++++++ objects/namespaced/rook-ceph/roles/",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/cephfs-external-provisioner-cfg.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/cephfs-external-provisioner-cfg.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rbd-external-provisioner-cfg.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rbd-external-provisioner-cfg.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-cmd-reporter.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-cmd-reporter.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-mgr.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-mgr.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-osd.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-purge-osd.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-purge-osd.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-system.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/roles/rook-ceph-system.yaml",
                                "cd+++++++++ objects/namespaced/rook-ceph/secrets/",
                                ">f+++++++++ objects/namespaced/rook-ceph/secrets/sh.helm.release.v1.rook-ceph.v1.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/secrets/sh.helm.release.v1.rook-ceph.v1.yaml",
                                "cd+++++++++ objects/namespaced/rook-ceph/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/objectstorage-provisioner.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/objectstorage-provisioner.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-cmd-reporter.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-cmd-reporter.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-default.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-default.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-mgr.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-mgr.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-osd.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-osd.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-purge-osd.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-purge-osd.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-rgw.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-rgw.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-system.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-ceph-system.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-plugin-sa.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-plugin-sa.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-provisioner-sa.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-cephfs-provisioner-sa.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-plugin-sa.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-plugin-sa.yaml",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-provisioner-sa.txt",
                                ">f+++++++++ objects/namespaced/rook-ceph/serviceaccounts/rook-csi-rbd-provisioner-sa.yaml",
                                "cd+++++++++ objects/namespaced/secretgen-controller/",
                                "cd+++++++++ objects/namespaced/secretgen-controller/configmaps/",
                                ">f+++++++++ objects/namespaced/secretgen-controller/configmaps/kube-root-ca.crt.txt",
                                ">f+++++++++ objects/namespaced/secretgen-controller/configmaps/kube-root-ca.crt.yaml",
                                "cd+++++++++ objects/namespaced/secretgen-controller/deployment/",
                                ">f+++++++++ objects/namespaced/secretgen-controller/deployment/secretgen-controller.txt",
                                ">f+++++++++ objects/namespaced/secretgen-controller/deployment/secretgen-controller.yaml",
                                "cd+++++++++ objects/namespaced/secretgen-controller/pods/",
                                ">f+++++++++ objects/namespaced/secretgen-controller/pods/secretgen-controller-5cf976ccc7-szs5h.txt",
                                ">f+++++++++ objects/namespaced/secretgen-controller/pods/secretgen-controller-5cf976ccc7-szs5h.yaml",
                                "cd+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/",
                                ">f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/default.txt",
                                ">f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/default.yaml",
                                ">f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/secretgen-controller-sa.txt",
                                ">f+++++++++ objects/namespaced/secretgen-controller/serviceaccounts/secretgen-controller-sa.yaml"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000017",
                        "name": "describe-kubernetes-objects",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/describe-kubernetes-objects"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:28:57.759827Z",
                            "start": "2026-03-01T22:28:56.797659Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000001d",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/pod-logs",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/pod-logs",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/pod-logs",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/pod-logs",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:28:58.000181Z",
                            "start": "2026-03-01T22:28:57.782456Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000021",
                        "name": "creating directory for pod logs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/pod-logs/failed-pods",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/pod-logs/failed-pods",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/pod-logs/failed-pods",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/pod-logs/failed-pods",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:28:58.224549Z",
                            "start": "2026-03-01T22:28:58.007523Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000022",
                        "name": "creating directory for failed pod logs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\nfunction get_pods () {\n  NAMESPACE=$1\n  kubectl get pods -n ${NAMESPACE} -o name | awk -F '/' '{ print $NF }' | xargs -I {} echo ${NAMESPACE} {}\n}\nexport -f get_pods\nfunction get_pod_logs () {\n  NAMESPACE=${1% *}\n  POD=${1#* }\n  INIT_CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.initContainers[]?.name')\n  CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.containers[].name')\n  for CONTAINER in ${INIT_CONTAINERS} ${CONTAINERS}; do\n    echo \"${NAMESPACE}/${POD}/${CONTAINER}\"\n    mkdir -p \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}\"\n    mkdir -p \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}\"\n    kubectl logs ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n    kubectl logs --previous ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n  done\n}\nexport -f get_pod_logs\nget_namespaces |  xargs -r -I {} bash -c 'get_pods \"$@\"' _ {} |  xargs -r -I {} bash -c 'get_pod_logs \"$@\"' _ {}",
                            "delta": "0:02:12.385933",
                            "end": "2026-03-01 22:31:10.841961",
                            "failed": true,
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nfunction get_namespaces () {\n  kubectl get namespaces -o name | awk -F '/' '{ print $NF }'\n}\nfunction get_pods () {\n  NAMESPACE=$1\n  kubectl get pods -n ${NAMESPACE} -o name | awk -F '/' '{ print $NF }' | xargs -I {} echo ${NAMESPACE} {}\n}\nexport -f get_pods\nfunction get_pod_logs () {\n  NAMESPACE=${1% *}\n  POD=${1#* }\n  INIT_CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.initContainers[]?.name')\n  CONTAINERS=$(kubectl get pod $POD -n ${NAMESPACE} -o json | jq -r '.spec.containers[].name')\n  for CONTAINER in ${INIT_CONTAINERS} ${CONTAINERS}; do\n    echo \"${NAMESPACE}/${POD}/${CONTAINER}\"\n    mkdir -p \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}\"\n    mkdir -p \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}\"\n    kubectl logs ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n    kubectl logs --previous ${POD} -n ${NAMESPACE} -c ${CONTAINER} > \"/tmp/logs/pod-logs/failed-pods/${NAMESPACE}/${POD}/${CONTAINER}.txt\"\n  done\n}\nexport -f get_pod_logs\nget_namespaces |  xargs -r -I {} bash -c 'get_pods \"$@\"' _ {} |  xargs -r -I {} bash -c 'get_pod_logs \"$@\"' _ {}",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000023-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "non-zero return code",
                            "rc": 123,
                            "start": "2026-03-01 22:28:58.456028",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "auth-system/keycloak-0/keycloak\nError from server (BadRequest): previous terminated container \"keycloak\" in pod \"keycloak-0\" not found\ncapi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/manager\nError from server (BadRequest): previous terminated container \"manager\" in pod \"capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k\" not found\ncapi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/manager\nError from server (BadRequest): previous terminated container \"manager\" in pod \"capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4\" not found\ncapi-system/capi-controller-manager-bc4cf8c95-w8p6b/manager\nError from server (BadRequest): previous terminated container \"manager\" in pod \"capi-controller-manager-bc4cf8c95-w8p6b\" not found\ncapo-system/capo-controller-manager-6975759b4b-tkxrs/manager\nError from server (BadRequest): previous terminated container \"manager\" in pod \"capo-controller-manager-6975759b4b-tkxrs\" not found\ncert-manager/cert-manager-75c4c745bc-45s4r/cert-manager-controller\nError from server (BadRequest): previous terminated container \"cert-manager-controller\" in pod \"cert-manager-75c4c745bc-45s4r\" not found\ncert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/cert-manager-cainjector\nError from server (BadRequest): previous terminated container \"cert-manager-cainjector\" in pod \"cert-manager-cainjector-64b59ddb75-tl5x7\" not found\ncert-manager/cert-manager-webhook-548949fc64-vkrlt/cert-manager-webhook\nError from server (BadRequest): previous terminated container \"cert-manager-webhook\" in pod \"cert-manager-webhook-548949fc64-vkrlt\" not found\ningress-nginx/ingress-nginx-controller-j4bqv/controller\nError from server (BadRequest): previous terminated container \"controller\" in pod \"ingress-nginx-controller-j4bqv\" not 
found\ningress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/ingress-nginx-default-backend\nError from server (BadRequest): previous terminated container \"ingress-nginx-default-backend\" in pod \"ingress-nginx-defaultbackend-6987ff55cf-gpx4l\" not found\nkube-system/cilium-operator-869df985b8-kszk2/cilium-operator\nError from server (BadRequest): previous terminated container \"cilium-operator\" in pod \"cilium-operator-869df985b8-kszk2\" not found\nkube-system/cilium-vdz4f/config\nError from server (BadRequest): previous terminated container \"config\" in pod \"cilium-vdz4f\" not found\nkube-system/cilium-vdz4f/mount-cgroup\nError from server (BadRequest): previous terminated container \"mount-cgroup\" in pod \"cilium-vdz4f\" not found\nkube-system/cilium-vdz4f/apply-sysctl-overwrites\nError from server (BadRequest): previous terminated container \"apply-sysctl-overwrites\" in pod \"cilium-vdz4f\" not found\nkube-system/cilium-vdz4f/mount-bpf-fs\nError from server (BadRequest): previous terminated container \"mount-bpf-fs\" in pod \"cilium-vdz4f\" not found\nkube-system/cilium-vdz4f/clean-cilium-state\nError from server (BadRequest): previous terminated container \"clean-cilium-state\" in pod \"cilium-vdz4f\" not found\nkube-system/cilium-vdz4f/install-cni-binaries\nError from server (BadRequest): previous terminated container \"install-cni-binaries\" in pod \"cilium-vdz4f\" not found\nkube-system/cilium-vdz4f/cilium-agent\nError from server (BadRequest): previous terminated container \"cilium-agent\" in pod \"cilium-vdz4f\" not found\nkube-system/coredns-67659f764b-6f2mm/coredns\nError from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-67659f764b-6f2mm\" not found\nkube-system/coredns-67659f764b-j6fp4/coredns\nError from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-67659f764b-j6fp4\" not found\nkube-system/etcd-instance/etcd\nError from server (BadRequest): previous terminated container 
\"etcd\" in pod \"etcd-instance\" not found\nkube-system/kube-apiserver-instance/kube-apiserver\nError from server (BadRequest): previous terminated container \"kube-apiserver\" in pod \"kube-apiserver-instance\" not found\nkube-system/kube-controller-manager-instance/kube-controller-manager\nError from server (BadRequest): previous terminated container \"kube-controller-manager\" in pod \"kube-controller-manager-instance\" not found\nkube-system/kube-proxy-sp2vs/kube-proxy\nError from server (BadRequest): previous terminated container \"kube-proxy\" in pod \"kube-proxy-sp2vs\" not found\nkube-system/kube-scheduler-instance/kube-scheduler\nError from server (BadRequest): previous terminated container \"kube-scheduler\" in pod \"kube-scheduler-instance\" not found\nkube-system/kube-vip-instance/kube-vip\nError from server (BadRequest): previous terminated container \"kube-vip\" in pod \"kube-vip-instance\" not found\nlocal-path-storage/local-path-provisioner-679c578f5-7h8w5/local-path-provisioner\nError from server (BadRequest): previous terminated container \"local-path-provisioner\" in pod \"local-path-provisioner-679c578f5-7h8w5\" not found\nmonitoring/alertmanager-kube-prometheus-stack-alertmanager-0/init-config-reloader\nError from server (BadRequest): previous terminated container \"init-config-reloader\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found\nmonitoring/alertmanager-kube-prometheus-stack-alertmanager-0/alertmanager\nError from server (BadRequest): previous terminated container \"alertmanager\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found\nmonitoring/alertmanager-kube-prometheus-stack-alertmanager-0/config-reloader\nError from server (BadRequest): previous terminated container \"config-reloader\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found\nmonitoring/alertmanager-kube-prometheus-stack-alertmanager-0/oauth2-proxy\nError from server (BadRequest): previous terminated container 
\"oauth2-proxy\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found\nmonitoring/goldpinger-7jzp8/goldpinger-daemon\nError from server (BadRequest): previous terminated container \"goldpinger-daemon\" in pod \"goldpinger-7jzp8\" not found\nmonitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-dashboard\nError from server (BadRequest): previous terminated container \"grafana-sc-dashboard\" in pod \"kube-prometheus-stack-grafana-668bfb9659-ft52b\" not found\nmonitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-datasources\nError from server (BadRequest): previous terminated container \"grafana-sc-datasources\" in pod \"kube-prometheus-stack-grafana-668bfb9659-ft52b\" not found\nmonitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana\nError from server (BadRequest): previous terminated container \"grafana\" in pod \"kube-prometheus-stack-grafana-668bfb9659-ft52b\" not found\nmonitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/kube-state-metrics\nError from server (BadRequest): previous terminated container \"kube-state-metrics\" in pod \"kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m\" not found\nmonitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/kube-prometheus-stack\nError from server (BadRequest): previous terminated container \"kube-prometheus-stack\" in pod \"kube-prometheus-stack-operator-cd88cf4bf-lzh7g\" not found\nmonitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/node-exporter\nmonitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/pod-tls-sidecar\nError from server (BadRequest): previous terminated container \"pod-tls-sidecar\" in pod \"kube-prometheus-stack-prometheus-node-exporter-59qlm\" not found\nmonitoring/loki-0/loki\nError from server (BadRequest): previous terminated container \"loki\" in pod \"loki-0\" not found\nmonitoring/loki-chunks-cache-0/memcached\nError from server (BadRequest): previous terminated container \"memcached\" in 
pod \"loki-chunks-cache-0\" not found\nmonitoring/loki-chunks-cache-0/exporter\nError from server (BadRequest): previous terminated container \"exporter\" in pod \"loki-chunks-cache-0\" not found\nmonitoring/loki-gateway-cf54cb88c-zv654/nginx\nError from server (BadRequest): previous terminated container \"nginx\" in pod \"loki-gateway-cf54cb88c-zv654\" not found\nmonitoring/loki-results-cache-0/memcached\nError from server (BadRequest): previous terminated container \"memcached\" in pod \"loki-results-cache-0\" not found\nmonitoring/loki-results-cache-0/exporter\nError from server (BadRequest): previous terminated container \"exporter\" in pod \"loki-results-cache-0\" not found\nmonitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/gc\nError from server (BadRequest): previous terminated container \"gc\" in pod \"node-feature-discovery-gc-6675cbb6d9-zv9sn\" not found\nmonitoring/node-feature-discovery-master-8665476dbc-t4z5z/master\nError from server (BadRequest): previous terminated container \"master\" in pod \"node-feature-discovery-master-8665476dbc-t4z5z\" not found\nmonitoring/node-feature-discovery-worker-p8lmk/worker\nError from server (BadRequest): previous terminated container \"worker\" in pod \"node-feature-discovery-worker-p8lmk\" not found\nmonitoring/prometheus-kube-prometheus-stack-prometheus-0/init-config-reloader\nError from server (BadRequest): previous terminated container \"init-config-reloader\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found\nmonitoring/prometheus-kube-prometheus-stack-prometheus-0/prometheus\nError from server (BadRequest): previous terminated container \"prometheus\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found\nmonitoring/prometheus-kube-prometheus-stack-prometheus-0/config-reloader\nError from server (BadRequest): previous terminated container \"config-reloader\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not 
found\nmonitoring/prometheus-kube-prometheus-stack-prometheus-0/pod-tls-sidecar\nError from server (BadRequest): previous terminated container \"pod-tls-sidecar\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found\nmonitoring/prometheus-kube-prometheus-stack-prometheus-0/oauth2-proxy\nError from server (BadRequest): previous terminated container \"oauth2-proxy\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found\nmonitoring/prometheus-pushgateway-7b8659c68b-28dht/pushgateway\nError from server (BadRequest): previous terminated container \"pushgateway\" in pod \"prometheus-pushgateway-7b8659c68b-28dht\" not found\nmonitoring/vector-qzjms/vector\nError from server (BadRequest): previous terminated container \"vector\" in pod \"vector-qzjms\" not found\nopenstack/barbican-api-775987496d-z6jqv/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"barbican-api-775987496d-z6jqv\" not found\nopenstack/barbican-api-775987496d-z6jqv/barbican-api\nError from server (BadRequest): previous terminated container \"barbican-api\" in pod \"barbican-api-775987496d-z6jqv\" not found\nopenstack/barbican-db-init-nm8k6/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"barbican-db-init-nm8k6\" not found\nopenstack/barbican-db-init-nm8k6/barbican-db-init-0\nError from server (BadRequest): previous terminated container \"barbican-db-init-0\" in pod \"barbican-db-init-nm8k6\" not found\nopenstack/barbican-db-sync-452x5/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"barbican-db-sync-452x5\" not found\nopenstack/barbican-db-sync-452x5/barbican-db-sync\nError from server (BadRequest): previous terminated container \"barbican-db-sync\" in pod \"barbican-db-sync-452x5\" not found\nopenstack/barbican-ks-endpoints-w2ffg/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"barbican-ks-endpoints-w2ffg\" not 
found\nopenstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"key-manager-ks-endpoints-admin\" in pod \"barbican-ks-endpoints-w2ffg\" not found\nopenstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"key-manager-ks-endpoints-internal\" in pod \"barbican-ks-endpoints-w2ffg\" not found\nopenstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"key-manager-ks-endpoints-public\" in pod \"barbican-ks-endpoints-w2ffg\" not found\nopenstack/barbican-ks-service-8pm7j/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"barbican-ks-service-8pm7j\" not found\nopenstack/barbican-ks-service-8pm7j/key-manager-ks-service-registration\nError from server (BadRequest): previous terminated container \"key-manager-ks-service-registration\" in pod \"barbican-ks-service-8pm7j\" not found\nopenstack/barbican-ks-user-fszfr/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"barbican-ks-user-fszfr\" not found\nopenstack/barbican-ks-user-fszfr/barbican-ks-user\nError from server (BadRequest): previous terminated container \"barbican-ks-user\" in pod \"barbican-ks-user-fszfr\" not found\nopenstack/barbican-rabbit-init-j5qmd/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"barbican-rabbit-init-j5qmd\" not found\nopenstack/barbican-rabbit-init-j5qmd/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"barbican-rabbit-init-j5qmd\" not found\nopenstack/cinder-api-86d7694f66-j97gj/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-api-86d7694f66-j97gj\" not found\nopenstack/cinder-api-86d7694f66-j97gj/ceph-coordination-volume-perms\nError from server (BadRequest): previous 
terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-api-86d7694f66-j97gj\" not found\nopenstack/cinder-api-86d7694f66-j97gj/cinder-api\nError from server (BadRequest): previous terminated container \"cinder-api\" in pod \"cinder-api-86d7694f66-j97gj\" not found\nopenstack/cinder-backup-dcfd7dfb7-sdwkc/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found\nopenstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-backup-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-backup-keyring-placement\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found\nopenstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-keyring-placement-rbd1\nError from server (BadRequest): previous terminated container \"ceph-keyring-placement-rbd1\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found\nopenstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-coordination-volume-perms\nError from server (BadRequest): previous terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found\nopenstack/cinder-backup-dcfd7dfb7-sdwkc/cinder-backup\nError from server (BadRequest): previous terminated container \"cinder-backup\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found\nopenstack/cinder-backup-storage-init-zmnkh/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-backup-storage-init-zmnkh\" not found\nopenstack/cinder-backup-storage-init-zmnkh/ceph-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"cinder-backup-storage-init-zmnkh\" not found\nopenstack/cinder-backup-storage-init-zmnkh/cinder-backup-storage-init\nError from server (BadRequest): previous terminated container \"cinder-backup-storage-init\" in pod \"cinder-backup-storage-init-zmnkh\" not found\nopenstack/cinder-bootstrap-wng86/init\nError from server (BadRequest): previous terminated container 
\"init\" in pod \"cinder-bootstrap-wng86\" not found\nopenstack/cinder-bootstrap-wng86/bootstrap\nError from server (BadRequest): previous terminated container \"bootstrap\" in pod \"cinder-bootstrap-wng86\" not found\nopenstack/cinder-create-internal-tenant-6vgll/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-create-internal-tenant-6vgll\" not found\nopenstack/cinder-create-internal-tenant-6vgll/create-internal-tenant\nError from server (BadRequest): previous terminated container \"create-internal-tenant\" in pod \"cinder-create-internal-tenant-6vgll\" not found\nopenstack/cinder-db-init-mzm5b/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-db-init-mzm5b\" not found\nopenstack/cinder-db-init-mzm5b/cinder-db-init-0\nError from server (BadRequest): previous terminated container \"cinder-db-init-0\" in pod \"cinder-db-init-mzm5b\" not found\nopenstack/cinder-db-sync-mz6ls/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-db-sync-mz6ls\" not found\nopenstack/cinder-db-sync-mz6ls/cinder-db-sync\nError from server (BadRequest): previous terminated container \"cinder-db-sync\" in pod \"cinder-db-sync-mz6ls\" not found\nopenstack/cinder-ks-endpoints-xv2tb/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-ks-endpoints-xv2tb\" not found\nopenstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"volumev3-ks-endpoints-admin\" in pod \"cinder-ks-endpoints-xv2tb\" not found\nopenstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"volumev3-ks-endpoints-internal\" in pod \"cinder-ks-endpoints-xv2tb\" not found\nopenstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-public\nError from server (BadRequest): previous terminated container 
\"volumev3-ks-endpoints-public\" in pod \"cinder-ks-endpoints-xv2tb\" not found\nopenstack/cinder-ks-service-dlcxz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-ks-service-dlcxz\" not found\nopenstack/cinder-ks-service-dlcxz/volumev3-ks-service-registration\nError from server (BadRequest): previous terminated container \"volumev3-ks-service-registration\" in pod \"cinder-ks-service-dlcxz\" not found\nopenstack/cinder-ks-user-5bd5g/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-ks-user-5bd5g\" not found\nopenstack/cinder-ks-user-5bd5g/cinder-ks-user\nError from server (BadRequest): previous terminated container \"cinder-ks-user\" in pod \"cinder-ks-user-5bd5g\" not found\nopenstack/cinder-rabbit-init-l4fpm/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-rabbit-init-l4fpm\" not found\nopenstack/cinder-rabbit-init-l4fpm/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"cinder-rabbit-init-l4fpm\" not found\nopenstack/cinder-scheduler-586f444995-p7grf/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-scheduler-586f444995-p7grf\" not found\nopenstack/cinder-scheduler-586f444995-p7grf/ceph-coordination-volume-perms\nError from server (BadRequest): previous terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-scheduler-586f444995-p7grf\" not found\nopenstack/cinder-scheduler-586f444995-p7grf/cinder-scheduler\nError from server (BadRequest): previous terminated container \"cinder-scheduler\" in pod \"cinder-scheduler-586f444995-p7grf\" not found\nopenstack/cinder-storage-init-vt6br/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-storage-init-vt6br\" not found\nopenstack/cinder-storage-init-vt6br/ceph-keyring-placement\nError from server (BadRequest): previous terminated container 
\"ceph-keyring-placement\" in pod \"cinder-storage-init-vt6br\" not found\nopenstack/cinder-storage-init-vt6br/cinder-storage-init-rbd1\nError from server (BadRequest): previous terminated container \"cinder-storage-init-rbd1\" in pod \"cinder-storage-init-vt6br\" not found\nopenstack/cinder-volume-66dc847979-qgp4l/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-volume-66dc847979-qgp4l\" not found\nopenstack/cinder-volume-66dc847979-qgp4l/ceph-keyring-placement-rbd1\nError from server (BadRequest): previous terminated container \"ceph-keyring-placement-rbd1\" in pod \"cinder-volume-66dc847979-qgp4l\" not found\nopenstack/cinder-volume-66dc847979-qgp4l/ceph-coordination-volume-perms\nError from server (BadRequest): previous terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-volume-66dc847979-qgp4l\" not found\nopenstack/cinder-volume-66dc847979-qgp4l/init-cinder-conf\nError from server (BadRequest): previous terminated container \"init-cinder-conf\" in pod \"cinder-volume-66dc847979-qgp4l\" not found\nopenstack/cinder-volume-66dc847979-qgp4l/cinder-volume\nError from server (BadRequest): previous terminated container \"cinder-volume\" in pod \"cinder-volume-66dc847979-qgp4l\" not found\nopenstack/cinder-volume-usage-audit-29540045-jbmvh/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"cinder-volume-usage-audit-29540045-jbmvh\" not found\nopenstack/cinder-volume-usage-audit-29540045-jbmvh/cinder-volume-usage-audit\nError from server (BadRequest): previous terminated container \"cinder-volume-usage-audit\" in pod \"cinder-volume-usage-audit-29540045-jbmvh\" not found\nopenstack/glance-api-65d579bfc8-6x76l/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-api-65d579bfc8-6x76l\" not found\nopenstack/glance-api-65d579bfc8-6x76l/glance-perms\nError from server (BadRequest): previous terminated container \"glance-perms\" in pod 
\"glance-api-65d579bfc8-6x76l\" not found\nopenstack/glance-api-65d579bfc8-6x76l/ceph-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"glance-api-65d579bfc8-6x76l\" not found\nopenstack/glance-api-65d579bfc8-6x76l/glance-api\nError from server (BadRequest): previous terminated container \"glance-api\" in pod \"glance-api-65d579bfc8-6x76l\" not found\nopenstack/glance-db-init-wbpff/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-db-init-wbpff\" not found\nopenstack/glance-db-init-wbpff/glance-db-init-0\nError from server (BadRequest): previous terminated container \"glance-db-init-0\" in pod \"glance-db-init-wbpff\" not found\nopenstack/glance-db-sync-gk84f/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-db-sync-gk84f\" not found\nopenstack/glance-db-sync-gk84f/glance-db-sync\nError from server (BadRequest): previous terminated container \"glance-db-sync\" in pod \"glance-db-sync-gk84f\" not found\nopenstack/glance-ks-endpoints-dq2cc/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-ks-endpoints-dq2cc\" not found\nopenstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"image-ks-endpoints-admin\" in pod \"glance-ks-endpoints-dq2cc\" not found\nopenstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"image-ks-endpoints-internal\" in pod \"glance-ks-endpoints-dq2cc\" not found\nopenstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"image-ks-endpoints-public\" in pod \"glance-ks-endpoints-dq2cc\" not found\nopenstack/glance-ks-service-5h6bw/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-ks-service-5h6bw\" not 
found\nopenstack/glance-ks-service-5h6bw/image-ks-service-registration\nError from server (BadRequest): previous terminated container \"image-ks-service-registration\" in pod \"glance-ks-service-5h6bw\" not found\nopenstack/glance-ks-user-lcfxr/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-ks-user-lcfxr\" not found\nopenstack/glance-ks-user-lcfxr/glance-ks-user\nError from server (BadRequest): previous terminated container \"glance-ks-user\" in pod \"glance-ks-user-lcfxr\" not found\nopenstack/glance-metadefs-load-476tp/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-metadefs-load-476tp\" not found\nopenstack/glance-metadefs-load-476tp/glance-metadefs-load\nError from server (BadRequest): previous terminated container \"glance-metadefs-load\" in pod \"glance-metadefs-load-476tp\" not found\nopenstack/glance-rabbit-init-c6rjt/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-rabbit-init-c6rjt\" not found\nopenstack/glance-rabbit-init-c6rjt/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"glance-rabbit-init-c6rjt\" not found\nopenstack/glance-storage-init-hdcpc/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"glance-storage-init-hdcpc\" not found\nopenstack/glance-storage-init-hdcpc/ceph-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"glance-storage-init-hdcpc\" not found\nopenstack/glance-storage-init-hdcpc/glance-storage-init\nError from server (BadRequest): previous terminated container \"glance-storage-init\" in pod \"glance-storage-init-hdcpc\" not found\nopenstack/heat-api-6d65f9477-kmbkl/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-api-6d65f9477-kmbkl\" not found\nopenstack/heat-api-6d65f9477-kmbkl/heat-api\nError from server 
(BadRequest): previous terminated container \"heat-api\" in pod \"heat-api-6d65f9477-kmbkl\" not found\nopenstack/heat-bootstrap-9dwg2/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-bootstrap-9dwg2\" not found\nopenstack/heat-bootstrap-9dwg2/bootstrap\nError from server (BadRequest): previous terminated container \"bootstrap\" in pod \"heat-bootstrap-9dwg2\" not found\nopenstack/heat-cfn-f44db7787-t8f7m/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-cfn-f44db7787-t8f7m\" not found\nopenstack/heat-cfn-f44db7787-t8f7m/heat-cfn\nError from server (BadRequest): previous terminated container \"heat-cfn\" in pod \"heat-cfn-f44db7787-t8f7m\" not found\nopenstack/heat-db-init-fk8qw/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-db-init-fk8qw\" not found\nopenstack/heat-db-init-fk8qw/heat-db-init-0\nError from server (BadRequest): previous terminated container \"heat-db-init-0\" in pod \"heat-db-init-fk8qw\" not found\nopenstack/heat-db-sync-cxmcb/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-db-sync-cxmcb\" not found\nopenstack/heat-db-sync-cxmcb/heat-db-sync\nError from server (BadRequest): previous terminated container \"heat-db-sync\" in pod \"heat-db-sync-cxmcb\" not found\nopenstack/heat-domain-ks-user-tq2c5/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-domain-ks-user-tq2c5\" not found\nopenstack/heat-domain-ks-user-tq2c5/heat-ks-domain-user\nError from server (BadRequest): previous terminated container \"heat-ks-domain-user\" in pod \"heat-domain-ks-user-tq2c5\" not found\nopenstack/heat-engine-64f8b77bfb-wngkr/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-64f8b77bfb-wngkr\" not found\nopenstack/heat-engine-64f8b77bfb-wngkr/heat-engine\nError from server (BadRequest): previous terminated container 
\"heat-engine\" in pod \"heat-engine-64f8b77bfb-wngkr\" not found\nopenstack/heat-engine-cleaner-29540055-p9pq9/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-cleaner-29540055-p9pq9\" not found\nopenstack/heat-engine-cleaner-29540055-p9pq9/heat-engine-cleaner\nError from server (BadRequest): previous terminated container \"heat-engine-cleaner\" in pod \"heat-engine-cleaner-29540055-p9pq9\" not found\nopenstack/heat-engine-cleaner-29540060-z4g95/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-cleaner-29540060-z4g95\" not found\nopenstack/heat-engine-cleaner-29540060-z4g95/heat-engine-cleaner\nError from server (BadRequest): previous terminated container \"heat-engine-cleaner\" in pod \"heat-engine-cleaner-29540060-z4g95\" not found\nopenstack/heat-engine-cleaner-29540065-rcjr2/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-cleaner-29540065-rcjr2\" not found\nopenstack/heat-engine-cleaner-29540065-rcjr2/heat-engine-cleaner\nError from server (BadRequest): previous terminated container \"heat-engine-cleaner\" in pod \"heat-engine-cleaner-29540065-rcjr2\" not found\nopenstack/heat-ks-endpoints-wwzbz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-ks-endpoints-wwzbz\" not found\nopenstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"orchestration-ks-endpoints-admin\" in pod \"heat-ks-endpoints-wwzbz\" not found\nopenstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"orchestration-ks-endpoints-internal\" in pod \"heat-ks-endpoints-wwzbz\" not found\nopenstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"orchestration-ks-endpoints-public\" in pod 
\"heat-ks-endpoints-wwzbz\" not found\nopenstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"cloudformation-ks-endpoints-admin\" in pod \"heat-ks-endpoints-wwzbz\" not found\nopenstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"cloudformation-ks-endpoints-internal\" in pod \"heat-ks-endpoints-wwzbz\" not found\nopenstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"cloudformation-ks-endpoints-public\" in pod \"heat-ks-endpoints-wwzbz\" not found\nopenstack/heat-ks-service-8pxqz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-ks-service-8pxqz\" not found\nopenstack/heat-ks-service-8pxqz/orchestration-ks-service-registration\nError from server (BadRequest): previous terminated container \"orchestration-ks-service-registration\" in pod \"heat-ks-service-8pxqz\" not found\nopenstack/heat-ks-service-8pxqz/cloudformation-ks-service-registration\nError from server (BadRequest): previous terminated container \"cloudformation-ks-service-registration\" in pod \"heat-ks-service-8pxqz\" not found\nopenstack/heat-ks-user-tfk98/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-ks-user-tfk98\" not found\nopenstack/heat-ks-user-tfk98/heat-ks-user\nError from server (BadRequest): previous terminated container \"heat-ks-user\" in pod \"heat-ks-user-tfk98\" not found\nopenstack/heat-ks-user-tfk98/heat-trustee-ks-user\nError from server (BadRequest): previous terminated container \"heat-trustee-ks-user\" in pod \"heat-ks-user-tfk98\" not found\nopenstack/heat-rabbit-init-rbl9n/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-rabbit-init-rbl9n\" not found\nopenstack/heat-rabbit-init-rbl9n/rabbit-init\nError from server 
(BadRequest): previous terminated container \"rabbit-init\" in pod \"heat-rabbit-init-rbl9n\" not found\nopenstack/heat-trusts-czrrv/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"heat-trusts-czrrv\" not found\nopenstack/heat-trusts-czrrv/heat-trusts\nError from server (BadRequest): previous terminated container \"heat-trusts\" in pod \"heat-trusts-czrrv\" not found\nopenstack/horizon-8cdd7b888-bvzvx/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"horizon-8cdd7b888-bvzvx\" not found\nopenstack/horizon-8cdd7b888-bvzvx/horizon\nError from server (BadRequest): previous terminated container \"horizon\" in pod \"horizon-8cdd7b888-bvzvx\" not found\nopenstack/horizon-db-init-s5pbw/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"horizon-db-init-s5pbw\" not found\nopenstack/horizon-db-init-s5pbw/horizon-db-init-0\nError from server (BadRequest): previous terminated container \"horizon-db-init-0\" in pod \"horizon-db-init-s5pbw\" not found\nopenstack/horizon-db-sync-bgr2g/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"horizon-db-sync-bgr2g\" not found\nopenstack/horizon-db-sync-bgr2g/horizon-db-sync\nError from server (BadRequest): previous terminated container \"horizon-db-sync\" in pod \"horizon-db-sync-bgr2g\" not found\nopenstack/keepalived-7jdfz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keepalived-7jdfz\" not found\nopenstack/keepalived-7jdfz/wait-for-ip\nError from server (BadRequest): previous terminated container \"wait-for-ip\" in pod \"keepalived-7jdfz\" not found\nopenstack/keepalived-7jdfz/keepalived\nError from server (BadRequest): previous terminated container \"keepalived\" in pod \"keepalived-7jdfz\" not found\nopenstack/keystone-api-c4656754c-mqbxm/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-api-c4656754c-mqbxm\" 
not found\nopenstack/keystone-api-c4656754c-mqbxm/keystone-api\nError from server (BadRequest): previous terminated container \"keystone-api\" in pod \"keystone-api-c4656754c-mqbxm\" not found\nopenstack/keystone-bootstrap-mdtrx/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-bootstrap-mdtrx\" not found\nopenstack/keystone-bootstrap-mdtrx/bootstrap\nError from server (BadRequest): previous terminated container \"bootstrap\" in pod \"keystone-bootstrap-mdtrx\" not found\nopenstack/keystone-credential-setup-6xsvx/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-credential-setup-6xsvx\" not found\nopenstack/keystone-credential-setup-6xsvx/keystone-credential-setup\nError from server (BadRequest): previous terminated container \"keystone-credential-setup\" in pod \"keystone-credential-setup-6xsvx\" not found\nopenstack/keystone-db-init-z5mwz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-db-init-z5mwz\" not found\nopenstack/keystone-db-init-z5mwz/keystone-db-init-0\nError from server (BadRequest): previous terminated container \"keystone-db-init-0\" in pod \"keystone-db-init-z5mwz\" not found\nopenstack/keystone-db-sync-zsq8z/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-db-sync-zsq8z\" not found\nopenstack/keystone-db-sync-zsq8z/keystone-db-sync\nError from server (BadRequest): previous terminated container \"keystone-db-sync\" in pod \"keystone-db-sync-zsq8z\" not found\nopenstack/keystone-domain-manage-v865d/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-domain-manage-v865d\" not found\nopenstack/keystone-domain-manage-v865d/keystone-domain-manage\nError from server (BadRequest): previous terminated container \"keystone-domain-manage\" in pod \"keystone-domain-manage-v865d\" not 
found\nopenstack/keystone-fernet-setup-5rfqs/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-fernet-setup-5rfqs\" not found\nopenstack/keystone-fernet-setup-5rfqs/keystone-fernet-setup\nError from server (BadRequest): previous terminated container \"keystone-fernet-setup\" in pod \"keystone-fernet-setup-5rfqs\" not found\nopenstack/keystone-rabbit-init-m44qz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"keystone-rabbit-init-m44qz\" not found\nopenstack/keystone-rabbit-init-m44qz/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"keystone-rabbit-init-m44qz\" not found\nopenstack/libvirt-libvirt-default-6bgrg/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"libvirt-libvirt-default-6bgrg\" not found\nopenstack/libvirt-libvirt-default-6bgrg/init-dynamic-options\nError from server (BadRequest): previous terminated container \"init-dynamic-options\" in pod \"libvirt-libvirt-default-6bgrg\" not found\nopenstack/libvirt-libvirt-default-6bgrg/ceph-admin-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-admin-keyring-placement\" in pod \"libvirt-libvirt-default-6bgrg\" not found\nopenstack/libvirt-libvirt-default-6bgrg/ceph-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"libvirt-libvirt-default-6bgrg\" not found\nopenstack/libvirt-libvirt-default-6bgrg/tls-sidecar\nError from server (BadRequest): previous terminated container \"tls-sidecar\" in pod \"libvirt-libvirt-default-6bgrg\" not found\nopenstack/libvirt-libvirt-default-6bgrg/libvirt\nError from server (BadRequest): previous terminated container \"libvirt\" in pod \"libvirt-libvirt-default-6bgrg\" not found\nopenstack/libvirt-libvirt-default-6bgrg/libvirt-exporter\nError from server (BadRequest): previous terminated container 
\"libvirt-exporter\" in pod \"libvirt-libvirt-default-6bgrg\" not found\nopenstack/magnum-api-8549df7884-9b2zc/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-api-8549df7884-9b2zc\" not found\nopenstack/magnum-api-8549df7884-9b2zc/magnum-api\nError from server (BadRequest): previous terminated container \"magnum-api\" in pod \"magnum-api-8549df7884-9b2zc\" not found\nopenstack/magnum-cluster-api-proxy-z2flh/magnum-cluster-api-proxy\nError from server (BadRequest): previous terminated container \"magnum-cluster-api-proxy\" in pod \"magnum-cluster-api-proxy-z2flh\" not found\nopenstack/magnum-conductor-0/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-conductor-0\" not found\nopenstack/magnum-conductor-0/magnum-conductor-init\nError from server (BadRequest): previous terminated container \"magnum-conductor-init\" in pod \"magnum-conductor-0\" not found\nopenstack/magnum-conductor-0/magnum-conductor\nError from server (BadRequest): previous terminated container \"magnum-conductor\" in pod \"magnum-conductor-0\" not found\nopenstack/magnum-db-init-dshrc/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-db-init-dshrc\" not found\nopenstack/magnum-db-init-dshrc/magnum-db-init-0\nError from server (BadRequest): previous terminated container \"magnum-db-init-0\" in pod \"magnum-db-init-dshrc\" not found\nopenstack/magnum-db-sync-8ttpk/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-db-sync-8ttpk\" not found\nopenstack/magnum-db-sync-8ttpk/magnum-db-sync\nError from server (BadRequest): previous terminated container \"magnum-db-sync\" in pod \"magnum-db-sync-8ttpk\" not found\nopenstack/magnum-domain-ks-user-vp8f2/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-domain-ks-user-vp8f2\" not 
found\nopenstack/magnum-domain-ks-user-vp8f2/magnum-ks-domain-user\nError from server (BadRequest): previous terminated container \"magnum-ks-domain-user\" in pod \"magnum-domain-ks-user-vp8f2\" not found\nopenstack/magnum-ks-endpoints-jvzvf/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-ks-endpoints-jvzvf\" not found\nopenstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"container-infra-ks-endpoints-admin\" in pod \"magnum-ks-endpoints-jvzvf\" not found\nopenstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"container-infra-ks-endpoints-internal\" in pod \"magnum-ks-endpoints-jvzvf\" not found\nopenstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"container-infra-ks-endpoints-public\" in pod \"magnum-ks-endpoints-jvzvf\" not found\nopenstack/magnum-ks-service-vdn67/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-ks-service-vdn67\" not found\nopenstack/magnum-ks-service-vdn67/container-infra-ks-service-registration\nError from server (BadRequest): previous terminated container \"container-infra-ks-service-registration\" in pod \"magnum-ks-service-vdn67\" not found\nopenstack/magnum-ks-user-4wvtj/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-ks-user-4wvtj\" not found\nopenstack/magnum-ks-user-4wvtj/magnum-ks-user\nError from server (BadRequest): previous terminated container \"magnum-ks-user\" in pod \"magnum-ks-user-4wvtj\" not found\nopenstack/magnum-rabbit-init-w7jc7/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"magnum-rabbit-init-w7jc7\" not found\nopenstack/magnum-rabbit-init-w7jc7/rabbit-init\nError from server (BadRequest): previous 
terminated container \"rabbit-init\" in pod \"magnum-rabbit-init-w7jc7\" not found\nopenstack/magnum-registry-c45778976-2zz96/registry\nError from server (BadRequest): previous terminated container \"registry\" in pod \"magnum-registry-c45778976-2zz96\" not found\nopenstack/manila-api-5cdf958bd9-hmbmb/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-api-5cdf958bd9-hmbmb\" not found\nopenstack/manila-api-5cdf958bd9-hmbmb/manila-api\nError from server (BadRequest): previous terminated container \"manila-api\" in pod \"manila-api-5cdf958bd9-hmbmb\" not found\nopenstack/manila-bootstrap-5wn97/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-bootstrap-5wn97\" not found\nopenstack/manila-bootstrap-5wn97/bootstrap\nError from server (BadRequest): previous terminated container \"bootstrap\" in pod \"manila-bootstrap-5wn97\" not found\nopenstack/manila-data-75cbc955bd-27jjw/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-data-75cbc955bd-27jjw\" not found\nopenstack/manila-data-75cbc955bd-27jjw/manila-data\nError from server (BadRequest): previous terminated container \"manila-data\" in pod \"manila-data-75cbc955bd-27jjw\" not found\nopenstack/manila-db-init-pbdm8/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-db-init-pbdm8\" not found\nopenstack/manila-db-init-pbdm8/manila-db-init-0\nError from server (BadRequest): previous terminated container \"manila-db-init-0\" in pod \"manila-db-init-pbdm8\" not found\nopenstack/manila-db-sync-rm9mz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-db-sync-rm9mz\" not found\nopenstack/manila-db-sync-rm9mz/manila-db-sync\nError from server (BadRequest): previous terminated container \"manila-db-sync\" in pod \"manila-db-sync-rm9mz\" not found\nopenstack/manila-ks-endpoints-d8nr9/init\nError from server (BadRequest): 
previous terminated container \"init\" in pod \"manila-ks-endpoints-d8nr9\" not found\nopenstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"share-ks-endpoints-admin\" in pod \"manila-ks-endpoints-d8nr9\" not found\nopenstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"share-ks-endpoints-internal\" in pod \"manila-ks-endpoints-d8nr9\" not found\nopenstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"share-ks-endpoints-public\" in pod \"manila-ks-endpoints-d8nr9\" not found\nopenstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"sharev2-ks-endpoints-admin\" in pod \"manila-ks-endpoints-d8nr9\" not found\nopenstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"sharev2-ks-endpoints-internal\" in pod \"manila-ks-endpoints-d8nr9\" not found\nopenstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"sharev2-ks-endpoints-public\" in pod \"manila-ks-endpoints-d8nr9\" not found\nopenstack/manila-ks-service-g7svt/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-ks-service-g7svt\" not found\nopenstack/manila-ks-service-g7svt/share-ks-service-registration\nError from server (BadRequest): previous terminated container \"share-ks-service-registration\" in pod \"manila-ks-service-g7svt\" not found\nopenstack/manila-ks-service-g7svt/sharev2-ks-service-registration\nError from server (BadRequest): previous terminated container \"sharev2-ks-service-registration\" in pod \"manila-ks-service-g7svt\" not found\nopenstack/manila-ks-user-pr9mg/init\nError from server (BadRequest): previous terminated container 
\"init\" in pod \"manila-ks-user-pr9mg\" not found\nopenstack/manila-ks-user-pr9mg/manila-ks-user\nError from server (BadRequest): previous terminated container \"manila-ks-user\" in pod \"manila-ks-user-pr9mg\" not found\nopenstack/manila-rabbit-init-74vjs/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-rabbit-init-74vjs\" not found\nopenstack/manila-rabbit-init-74vjs/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"manila-rabbit-init-74vjs\" not found\nopenstack/manila-scheduler-5b584c8656-mmnnd/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-scheduler-5b584c8656-mmnnd\" not found\nopenstack/manila-scheduler-5b584c8656-mmnnd/manila-scheduler\nError from server (BadRequest): previous terminated container \"manila-scheduler\" in pod \"manila-scheduler-5b584c8656-mmnnd\" not found\nopenstack/manila-share-68879775b-rc6q9/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"manila-share-68879775b-rc6q9\" not found\nopenstack/manila-share-68879775b-rc6q9/manila-share-init\nError from server (BadRequest): previous terminated container \"manila-share-init\" in pod \"manila-share-68879775b-rc6q9\" not found\nopenstack/manila-share-68879775b-rc6q9/manila-share\nError from server (BadRequest): previous terminated container \"manila-share\" in pod \"manila-share-68879775b-rc6q9\" not found\nopenstack/memcached-memcached-6479589586-9sxjx/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"memcached-memcached-6479589586-9sxjx\" not found\nopenstack/memcached-memcached-6479589586-9sxjx/memcached\nError from server (BadRequest): previous terminated container \"memcached\" in pod \"memcached-memcached-6479589586-9sxjx\" not found\nopenstack/memcached-memcached-6479589586-9sxjx/memcached-exporter\nError from server (BadRequest): previous terminated container 
\"memcached-exporter\" in pod \"memcached-memcached-6479589586-9sxjx\" not found\nopenstack/neutron-db-init-l7c9v/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"neutron-db-init-l7c9v\" not found\nopenstack/neutron-db-init-l7c9v/neutron-db-init-0\nError from server (BadRequest): previous terminated container \"neutron-db-init-0\" in pod \"neutron-db-init-l7c9v\" not found\nopenstack/neutron-db-sync-brwb5/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"neutron-db-sync-brwb5\" not found\nopenstack/neutron-db-sync-brwb5/neutron-db-sync\nError from server (BadRequest): previous terminated container \"neutron-db-sync\" in pod \"neutron-db-sync-brwb5\" not found\nopenstack/neutron-ks-endpoints-dstkg/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"neutron-ks-endpoints-dstkg\" not found\nopenstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"network-ks-endpoints-admin\" in pod \"neutron-ks-endpoints-dstkg\" not found\nopenstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"network-ks-endpoints-internal\" in pod \"neutron-ks-endpoints-dstkg\" not found\nopenstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"network-ks-endpoints-public\" in pod \"neutron-ks-endpoints-dstkg\" not found\nopenstack/neutron-ks-service-sq4tp/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"neutron-ks-service-sq4tp\" not found\nopenstack/neutron-ks-service-sq4tp/network-ks-service-registration\nError from server (BadRequest): previous terminated container \"network-ks-service-registration\" in pod \"neutron-ks-service-sq4tp\" not found\nopenstack/neutron-ks-user-kcfc4/init\nError from server (BadRequest): previous terminated 
container \"init\" in pod \"neutron-ks-user-kcfc4\" not found\nopenstack/neutron-ks-user-kcfc4/neutron-ks-user\nError from server (BadRequest): previous terminated container \"neutron-ks-user\" in pod \"neutron-ks-user-kcfc4\" not found\nopenstack/neutron-netns-cleanup-cron-default-8frwf/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"neutron-netns-cleanup-cron-default-8frwf\" not found\nopenstack/neutron-netns-cleanup-cron-default-8frwf/neutron-netns-cleanup-cron\nError from server (BadRequest): previous terminated container \"neutron-netns-cleanup-cron\" in pod \"neutron-netns-cleanup-cron-default-8frwf\" not found\nopenstack/neutron-ovn-metadata-agent-default-flhb5/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found\nopenstack/neutron-ovn-metadata-agent-default-flhb5/neutron-metadata-agent-init\nError from server (BadRequest): previous terminated container \"neutron-metadata-agent-init\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found\nopenstack/neutron-ovn-metadata-agent-default-flhb5/ovn-neutron-init\nError from server (BadRequest): previous terminated container \"ovn-neutron-init\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found\nopenstack/neutron-ovn-metadata-agent-default-flhb5/neutron-ovn-metadata-agent\nError from server (BadRequest): previous terminated container \"neutron-ovn-metadata-agent\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found\nopenstack/neutron-rabbit-init-rdnbf/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"neutron-rabbit-init-rdnbf\" not found\nopenstack/neutron-rabbit-init-rdnbf/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"neutron-rabbit-init-rdnbf\" not found\nopenstack/neutron-server-649c5974f6-5dkvl/init\nError from server (BadRequest): previous terminated container \"init\" in 
pod \"neutron-server-649c5974f6-5dkvl\" not found\nopenstack/neutron-server-649c5974f6-5dkvl/ovn-neutron-init\nError from server (BadRequest): previous terminated container \"ovn-neutron-init\" in pod \"neutron-server-649c5974f6-5dkvl\" not found\nopenstack/neutron-server-649c5974f6-5dkvl/neutron-server\nError from server (BadRequest): previous terminated container \"neutron-server\" in pod \"neutron-server-649c5974f6-5dkvl\" not found\nopenstack/neutron-server-649c5974f6-5dkvl/neutron-policy-server\nError from server (BadRequest): previous terminated container \"neutron-policy-server\" in pod \"neutron-server-649c5974f6-5dkvl\" not found\nopenstack/nova-api-metadata-546d94ddd7-btnrc/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-api-metadata-546d94ddd7-btnrc\" not found\nopenstack/nova-api-metadata-546d94ddd7-btnrc/nova-api-metadata-init\nError from server (BadRequest): previous terminated container \"nova-api-metadata-init\" in pod \"nova-api-metadata-546d94ddd7-btnrc\" not found\nopenstack/nova-api-metadata-546d94ddd7-btnrc/nova-api\nError from server (BadRequest): previous terminated container \"nova-api\" in pod \"nova-api-metadata-546d94ddd7-btnrc\" not found\nopenstack/nova-api-osapi-99c7b7cd8-2lnzr/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-api-osapi-99c7b7cd8-2lnzr\" not found\nopenstack/nova-api-osapi-99c7b7cd8-2lnzr/nova-osapi\nError from server (BadRequest): previous terminated container \"nova-osapi\" in pod \"nova-api-osapi-99c7b7cd8-2lnzr\" not found\nopenstack/nova-bootstrap-trzqq/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-bootstrap-trzqq\" not found\nopenstack/nova-bootstrap-trzqq/bootstrap\nError from server (BadRequest): previous terminated container \"bootstrap\" in pod \"nova-bootstrap-trzqq\" not found\nopenstack/nova-cell-setup-29540040-rtzd7/init\nError from server (BadRequest): previous terminated 
container \"init\" in pod \"nova-cell-setup-29540040-rtzd7\" not found\nopenstack/nova-cell-setup-29540040-rtzd7/nova-cell-setup\nError from server (BadRequest): previous terminated container \"nova-cell-setup\" in pod \"nova-cell-setup-29540040-rtzd7\" not found\nopenstack/nova-cell-setup-j97qh/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-cell-setup-j97qh\" not found\nopenstack/nova-cell-setup-j97qh/nova-cell-setup-init\nError from server (BadRequest): previous terminated container \"nova-cell-setup-init\" in pod \"nova-cell-setup-j97qh\" not found\nopenstack/nova-cell-setup-j97qh/nova-cell-setup\nError from server (BadRequest): previous terminated container \"nova-cell-setup\" in pod \"nova-cell-setup-j97qh\" not found\nopenstack/nova-compute-default-2v5pd/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/nova-compute-init\nError from server (BadRequest): previous terminated container \"nova-compute-init\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/ceph-perms\nError from server (BadRequest): previous terminated container \"ceph-perms\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/ceph-admin-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-admin-keyring-placement\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/ceph-keyring-placement\nError from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/nova-compute-vnc-init\nError from server (BadRequest): previous terminated container \"nova-compute-vnc-init\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/nova-compute-ssh-init\nError from server 
(BadRequest): previous terminated container \"nova-compute-ssh-init\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/nova-compute\nError from server (BadRequest): previous terminated container \"nova-compute\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-compute-default-2v5pd/nova-compute-ssh\nError from server (BadRequest): previous terminated container \"nova-compute-ssh\" in pod \"nova-compute-default-2v5pd\" not found\nopenstack/nova-conductor-5474cb4b8d-bxzhq/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-conductor-5474cb4b8d-bxzhq\" not found\nopenstack/nova-conductor-5474cb4b8d-bxzhq/nova-conductor\nError from server (BadRequest): previous terminated container \"nova-conductor\" in pod \"nova-conductor-5474cb4b8d-bxzhq\" not found\nopenstack/nova-db-init-b4sqh/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-db-init-b4sqh\" not found\nopenstack/nova-db-init-b4sqh/nova-db-init-0\nError from server (BadRequest): previous terminated container \"nova-db-init-0\" in pod \"nova-db-init-b4sqh\" not found\nopenstack/nova-db-init-b4sqh/nova-db-init-1\nError from server (BadRequest): previous terminated container \"nova-db-init-1\" in pod \"nova-db-init-b4sqh\" not found\nopenstack/nova-db-init-b4sqh/nova-db-init-2\nError from server (BadRequest): previous terminated container \"nova-db-init-2\" in pod \"nova-db-init-b4sqh\" not found\nopenstack/nova-db-sync-2rbjc/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-db-sync-2rbjc\" not found\nopenstack/nova-db-sync-2rbjc/nova-db-sync\nError from server (BadRequest): previous terminated container \"nova-db-sync\" in pod \"nova-db-sync-2rbjc\" not found\nopenstack/nova-ks-endpoints-zwcm6/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-ks-endpoints-zwcm6\" not 
found\nopenstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"compute-ks-endpoints-admin\" in pod \"nova-ks-endpoints-zwcm6\" not found\nopenstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"compute-ks-endpoints-internal\" in pod \"nova-ks-endpoints-zwcm6\" not found\nopenstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"compute-ks-endpoints-public\" in pod \"nova-ks-endpoints-zwcm6\" not found\nopenstack/nova-ks-service-fmj77/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-ks-service-fmj77\" not found\nopenstack/nova-ks-service-fmj77/compute-ks-service-registration\nError from server (BadRequest): previous terminated container \"compute-ks-service-registration\" in pod \"nova-ks-service-fmj77\" not found\nopenstack/nova-ks-user-t8xgz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-ks-user-t8xgz\" not found\nopenstack/nova-ks-user-t8xgz/nova-ks-user\nError from server (BadRequest): previous terminated container \"nova-ks-user\" in pod \"nova-ks-user-t8xgz\" not found\nopenstack/nova-novncproxy-85dd5b5965-z6hmj/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found\nopenstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init\nError from server (BadRequest): previous terminated container \"nova-novncproxy-init\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found\nopenstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init-assets\nError from server (BadRequest): previous terminated container \"nova-novncproxy-init-assets\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found\nopenstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy\nError from server (BadRequest): previous 
terminated container \"nova-novncproxy\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found\nopenstack/nova-rabbit-init-szpvx/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-rabbit-init-szpvx\" not found\nopenstack/nova-rabbit-init-szpvx/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"nova-rabbit-init-szpvx\" not found\nopenstack/nova-scheduler-78775555d4-hb2j9/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-scheduler-78775555d4-hb2j9\" not found\nopenstack/nova-scheduler-78775555d4-hb2j9/nova-scheduler\nError from server (BadRequest): previous terminated container \"nova-scheduler\" in pod \"nova-scheduler-78775555d4-hb2j9\" not found\nopenstack/nova-service-cleaner-29540040-cxdd4/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"nova-service-cleaner-29540040-cxdd4\" not found\nopenstack/nova-service-cleaner-29540040-cxdd4/nova-service-cleaner\nError from server (BadRequest): previous terminated container \"nova-service-cleaner\" in pod \"nova-service-cleaner-29540040-cxdd4\" not found\nopenstack/octavia-api-75db6578cf-m656r/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-api-75db6578cf-m656r\" not found\nopenstack/octavia-api-75db6578cf-m656r/octavia-api\nError from server (BadRequest): previous terminated container \"octavia-api\" in pod \"octavia-api-75db6578cf-m656r\" not found\nopenstack/octavia-bootstrap-kwfv2/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-bootstrap-kwfv2\" not found\nopenstack/octavia-bootstrap-kwfv2/bootstrap\nError from server (BadRequest): previous terminated container \"bootstrap\" in pod \"octavia-bootstrap-kwfv2\" not found\nopenstack/octavia-db-init-wnz5h/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-db-init-wnz5h\" not 
found\nopenstack/octavia-db-init-wnz5h/octavia-db-init-0\nError from server (BadRequest): previous terminated container \"octavia-db-init-0\" in pod \"octavia-db-init-wnz5h\" not found\nopenstack/octavia-db-init-wnz5h/octavia-db-init-1\nError from server (BadRequest): previous terminated container \"octavia-db-init-1\" in pod \"octavia-db-init-wnz5h\" not found\nopenstack/octavia-db-sync-rjq45/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-db-sync-rjq45\" not found\nopenstack/octavia-db-sync-rjq45/octavia-db-sync\nError from server (BadRequest): previous terminated container \"octavia-db-sync\" in pod \"octavia-db-sync-rjq45\" not found\nopenstack/octavia-health-manager-default-twmks/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-health-manager-default-twmks\" not found\nopenstack/octavia-health-manager-default-twmks/octavia-health-manager-get-port\nError from server (BadRequest): previous terminated container \"octavia-health-manager-get-port\" in pod \"octavia-health-manager-default-twmks\" not found\nopenstack/octavia-health-manager-default-twmks/octavia-health-manager-nic-init\nError from server (BadRequest): previous terminated container \"octavia-health-manager-nic-init\" in pod \"octavia-health-manager-default-twmks\" not found\nopenstack/octavia-health-manager-default-twmks/octavia-health-manager\nError from server (BadRequest): previous terminated container \"octavia-health-manager\" in pod \"octavia-health-manager-default-twmks\" not found\nopenstack/octavia-housekeeping-87b98c47b-vqwct/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-housekeeping-87b98c47b-vqwct\" not found\nopenstack/octavia-housekeeping-87b98c47b-vqwct/octavia-housekeeping\nError from server (BadRequest): previous terminated container \"octavia-housekeeping\" in pod \"octavia-housekeeping-87b98c47b-vqwct\" not 
found\nopenstack/octavia-ks-endpoints-jdlzw/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-ks-endpoints-jdlzw\" not found\nopenstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"load-balancer-ks-endpoints-admin\" in pod \"octavia-ks-endpoints-jdlzw\" not found\nopenstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"load-balancer-ks-endpoints-internal\" in pod \"octavia-ks-endpoints-jdlzw\" not found\nopenstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"load-balancer-ks-endpoints-public\" in pod \"octavia-ks-endpoints-jdlzw\" not found\nopenstack/octavia-ks-service-rkdp9/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-ks-service-rkdp9\" not found\nopenstack/octavia-ks-service-rkdp9/load-balancer-ks-service-registration\nError from server (BadRequest): previous terminated container \"load-balancer-ks-service-registration\" in pod \"octavia-ks-service-rkdp9\" not found\nopenstack/octavia-ks-user-tjl52/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-ks-user-tjl52\" not found\nopenstack/octavia-ks-user-tjl52/octavia-ks-user\nError from server (BadRequest): previous terminated container \"octavia-ks-user\" in pod \"octavia-ks-user-tjl52\" not found\nopenstack/octavia-rabbit-init-vdqxf/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"octavia-rabbit-init-vdqxf\" not found\nopenstack/octavia-rabbit-init-vdqxf/rabbit-init\nError from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"octavia-rabbit-init-vdqxf\" not found\nopenstack/octavia-worker-774cddbcdc-qxl6k/init\nError from server (BadRequest): previous terminated container 
\"init\" in pod \"octavia-worker-774cddbcdc-qxl6k\" not found\nopenstack/octavia-worker-774cddbcdc-qxl6k/octavia-worker\nError from server (BadRequest): previous terminated container \"octavia-worker\" in pod \"octavia-worker-774cddbcdc-qxl6k\" not found\nopenstack/openstack-database-exporter-7c944bc9f-w2bdb/openstack-database-exporter\nError from server (BadRequest): previous terminated container \"openstack-database-exporter\" in pod \"openstack-database-exporter-7c944bc9f-w2bdb\" not found\nopenstack/openstack-exporter-74676fb4b4-jrkwh/build-config\nError from server (BadRequest): previous terminated container \"build-config\" in pod \"openstack-exporter-74676fb4b4-jrkwh\" not found\nopenstack/openstack-exporter-74676fb4b4-jrkwh/openstack-exporter\nError from server (BadRequest): previous terminated container \"openstack-exporter\" in pod \"openstack-exporter-74676fb4b4-jrkwh\" not found\nopenstack/openvswitch-gj98d/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"openvswitch-gj98d\" not found\nopenstack/openvswitch-gj98d/openvswitch-db-perms\nError from server (BadRequest): previous terminated container \"openvswitch-db-perms\" in pod \"openvswitch-gj98d\" not found\nopenstack/openvswitch-gj98d/openvswitch-vswitchd-modules\nError from server (BadRequest): previous terminated container \"openvswitch-vswitchd-modules\" in pod \"openvswitch-gj98d\" not found\nopenstack/openvswitch-gj98d/openvswitch-db\nError from server (BadRequest): previous terminated container \"openvswitch-db\" in pod \"openvswitch-gj98d\" not found\nopenstack/openvswitch-gj98d/openvswitch-vswitchd\nError from server (BadRequest): previous terminated container \"openvswitch-vswitchd\" in pod \"openvswitch-gj98d\" not found\nopenstack/ovn-controller-6mbd4/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"ovn-controller-6mbd4\" not found\nopenstack/ovn-controller-6mbd4/get-gw-enabled\nError from server (BadRequest): 
previous terminated container \"get-gw-enabled\" in pod \"ovn-controller-6mbd4\" not found\nopenstack/ovn-controller-6mbd4/controller-init\nError from server (BadRequest): previous terminated container \"controller-init\" in pod \"ovn-controller-6mbd4\" not found\nopenstack/ovn-controller-6mbd4/controller\nError from server (BadRequest): previous terminated container \"controller\" in pod \"ovn-controller-6mbd4\" not found\nopenstack/ovn-controller-6mbd4/vector\nError from server (BadRequest): previous terminated container \"vector\" in pod \"ovn-controller-6mbd4\" not found\nopenstack/ovn-controller-6mbd4/log-parser\nError from server (BadRequest): previous terminated container \"log-parser\" in pod \"ovn-controller-6mbd4\" not found\nopenstack/ovn-northd-6c6687ddd6-7grhs/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"ovn-northd-6c6687ddd6-7grhs\" not found\nopenstack/ovn-northd-6c6687ddd6-7grhs/northd\nError from server (BadRequest): previous terminated container \"northd\" in pod \"ovn-northd-6c6687ddd6-7grhs\" not found\nopenstack/ovn-ovsdb-nb-0/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"ovn-ovsdb-nb-0\" not found\nopenstack/ovn-ovsdb-nb-0/ovsdb\nError from server (BadRequest): previous terminated container \"ovsdb\" in pod \"ovn-ovsdb-nb-0\" not found\nopenstack/ovn-ovsdb-sb-0/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"ovn-ovsdb-sb-0\" not found\nopenstack/ovn-ovsdb-sb-0/ovsdb\nError from server (BadRequest): previous terminated container \"ovsdb\" in pod \"ovn-ovsdb-sb-0\" not found\nopenstack/percona-xtradb-haproxy-0/pxc-init\nError from server (BadRequest): previous terminated container \"pxc-init\" in pod \"percona-xtradb-haproxy-0\" not found\nopenstack/percona-xtradb-haproxy-0/haproxy-init\nError from server (BadRequest): previous terminated container \"haproxy-init\" in pod \"percona-xtradb-haproxy-0\" not 
found\nopenstack/percona-xtradb-haproxy-0/haproxy\nError from server (BadRequest): previous terminated container \"haproxy\" in pod \"percona-xtradb-haproxy-0\" not found\nopenstack/percona-xtradb-haproxy-0/pxc-monit\nError from server (BadRequest): previous terminated container \"pxc-monit\" in pod \"percona-xtradb-haproxy-0\" not found\nopenstack/percona-xtradb-pxc-0/pxc-init\nError from server (BadRequest): previous terminated container \"pxc-init\" in pod \"percona-xtradb-pxc-0\" not found\nopenstack/percona-xtradb-pxc-0/pxc\nError from server (BadRequest): previous terminated container \"pxc\" in pod \"percona-xtradb-pxc-0\" not found\nopenstack/percona-xtradb-pxc-0/exporter\nError from server (BadRequest): previous terminated container \"exporter\" in pod \"percona-xtradb-pxc-0\" not found\nopenstack/placement-api-75695696c6-brsxj/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"placement-api-75695696c6-brsxj\" not found\nopenstack/placement-api-75695696c6-brsxj/placement-api\nError from server (BadRequest): previous terminated container \"placement-api\" in pod \"placement-api-75695696c6-brsxj\" not found\nopenstack/placement-db-init-89t92/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"placement-db-init-89t92\" not found\nopenstack/placement-db-init-89t92/placement-db-init-0\nError from server (BadRequest): previous terminated container \"placement-db-init-0\" in pod \"placement-db-init-89t92\" not found\nopenstack/placement-db-sync-nvqjv/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"placement-db-sync-nvqjv\" not found\nopenstack/placement-db-sync-nvqjv/placement-db-sync\nError from server (BadRequest): previous terminated container \"placement-db-sync\" in pod \"placement-db-sync-nvqjv\" not found\nopenstack/placement-ks-endpoints-jmfl7/init\nError from server (BadRequest): previous terminated container \"init\" in pod 
\"placement-ks-endpoints-jmfl7\" not found\nopenstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-admin\nError from server (BadRequest): previous terminated container \"placement-ks-endpoints-admin\" in pod \"placement-ks-endpoints-jmfl7\" not found\nopenstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-internal\nError from server (BadRequest): previous terminated container \"placement-ks-endpoints-internal\" in pod \"placement-ks-endpoints-jmfl7\" not found\nopenstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-public\nError from server (BadRequest): previous terminated container \"placement-ks-endpoints-public\" in pod \"placement-ks-endpoints-jmfl7\" not found\nopenstack/placement-ks-service-qdjdz/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"placement-ks-service-qdjdz\" not found\nopenstack/placement-ks-service-qdjdz/placement-ks-service-registration\nError from server (BadRequest): previous terminated container \"placement-ks-service-registration\" in pod \"placement-ks-service-qdjdz\" not found\nopenstack/placement-ks-user-blkn9/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"placement-ks-user-blkn9\" not found\nopenstack/placement-ks-user-blkn9/placement-ks-user\nError from server (BadRequest): previous terminated container \"placement-ks-user\" in pod \"placement-ks-user-blkn9\" not found\nopenstack/pxc-operator-7cff949c8b-7zp4j/percona-xtradb-cluster-operator\nError from server (BadRequest): previous terminated container \"percona-xtradb-cluster-operator\" in pod \"pxc-operator-7cff949c8b-7zp4j\" not found\nopenstack/rabbitmq-barbican-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-barbican-server-0\" not found\nopenstack/rabbitmq-barbican-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-barbican-server-0\" not 
found\nopenstack/rabbitmq-cinder-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-cinder-server-0\" not found\nopenstack/rabbitmq-cinder-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-cinder-server-0\" not found\nopenstack/rabbitmq-cluster-operator-5448d56d95-vk9km/rabbitmq-cluster-operator\nError from server (BadRequest): previous terminated container \"rabbitmq-cluster-operator\" in pod \"rabbitmq-cluster-operator-5448d56d95-vk9km\" not found\nopenstack/rabbitmq-glance-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-glance-server-0\" not found\nopenstack/rabbitmq-glance-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-glance-server-0\" not found\nopenstack/rabbitmq-heat-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-heat-server-0\" not found\nopenstack/rabbitmq-heat-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-heat-server-0\" not found\nopenstack/rabbitmq-keystone-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-keystone-server-0\" not found\nopenstack/rabbitmq-keystone-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-keystone-server-0\" not found\nopenstack/rabbitmq-magnum-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-magnum-server-0\" not found\nopenstack/rabbitmq-magnum-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-magnum-server-0\" not 
found\nopenstack/rabbitmq-manila-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-manila-server-0\" not found\nopenstack/rabbitmq-manila-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-manila-server-0\" not found\nopenstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/rabbitmq-cluster-operator\nError from server (BadRequest): previous terminated container \"rabbitmq-cluster-operator\" in pod \"rabbitmq-messaging-topology-operator-7f8596f788-84l9x\" not found\nopenstack/rabbitmq-neutron-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-neutron-server-0\" not found\nopenstack/rabbitmq-neutron-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-neutron-server-0\" not found\nopenstack/rabbitmq-nova-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-nova-server-0\" not found\nopenstack/rabbitmq-nova-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-nova-server-0\" not found\nopenstack/rabbitmq-octavia-server-0/setup-container\nError from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-octavia-server-0\" not found\nopenstack/rabbitmq-octavia-server-0/rabbitmq\nError from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-octavia-server-0\" not found\nopenstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/make-container-crash-dir\nError from server (BadRequest): previous terminated container \"make-container-crash-dir\" in pod \"rook-ceph-crashcollector-instance-754c646bfd-htxl9\" not 
found\nopenstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/chown-container-data-dir\nError from server (BadRequest): previous terminated container \"chown-container-data-dir\" in pod \"rook-ceph-crashcollector-instance-754c646bfd-htxl9\" not found\nopenstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/ceph-crash\nError from server (BadRequest): previous terminated container \"ceph-crash\" in pod \"rook-ceph-crashcollector-instance-754c646bfd-htxl9\" not found\nopenstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/chown-container-data-dir\nError from server (BadRequest): previous terminated container \"chown-container-data-dir\" in pod \"rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw\" not found\nopenstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/rgw\nError from server (BadRequest): previous terminated container \"rgw\" in pod \"rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw\" not found\nopenstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/log-collector\nError from server (BadRequest): previous terminated container \"log-collector\" in pod \"rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw\" not found\nopenstack/staffeln-api-6669c8779f-qgp4c/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"staffeln-api-6669c8779f-qgp4c\" not found\nopenstack/staffeln-api-6669c8779f-qgp4c/staffeln-api\nError from server (BadRequest): previous terminated container \"staffeln-api\" in pod \"staffeln-api-6669c8779f-qgp4c\" not found\nopenstack/staffeln-conductor-7b5d99bcd4-ws4sl/staffeln-conductor\nError from server (BadRequest): previous terminated container \"staffeln-conductor\" in pod \"staffeln-conductor-7b5d99bcd4-ws4sl\" not found\nopenstack/staffeln-db-init-p4pq4/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"staffeln-db-init-p4pq4\" not found\nopenstack/staffeln-db-init-p4pq4/staffeln-db-init-0\nError from server (BadRequest): previous terminated container \"staffeln-db-init-0\" in pod \"staffeln-db-init-p4pq4\" not 
found\nopenstack/staffeln-db-sync-khzx8/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"staffeln-db-sync-khzx8\" not found\nopenstack/staffeln-db-sync-khzx8/staffeln-db-sync\nError from server (BadRequest): previous terminated container \"staffeln-db-sync\" in pod \"staffeln-db-sync-khzx8\" not found\nopenstack/tempest-ks-user-kwbf6/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"tempest-ks-user-kwbf6\" not found\nopenstack/tempest-ks-user-kwbf6/tempest-ks-user\nError from server (BadRequest): previous terminated container \"tempest-ks-user\" in pod \"tempest-ks-user-kwbf6\" not found\nopenstack/tempest-run-tests-g5plh/init\nError from server (BadRequest): previous terminated container \"init\" in pod \"tempest-run-tests-g5plh\" not found\nopenstack/tempest-run-tests-g5plh/tempest-run-tests-init\nError from server (BadRequest): previous terminated container \"tempest-run-tests-init\" in pod \"tempest-run-tests-g5plh\" not found\nopenstack/tempest-run-tests-g5plh/tempest-run-tests\nError from server (BadRequest): previous terminated container \"tempest-run-tests\" in pod \"tempest-run-tests-g5plh\" not found\nopenstack/valkey-node-0/valkey\nError from server (BadRequest): previous terminated container \"valkey\" in pod \"valkey-node-0\" not found\nopenstack/valkey-node-0/sentinel\nError from server (BadRequest): previous terminated container \"sentinel\" in pod \"valkey-node-0\" not found\nopenstack/valkey-node-0/metrics\nError from server (BadRequest): previous terminated container \"metrics\" in pod \"valkey-node-0\" not found\norc-system/orc-controller-manager-6cb597b5d4-glhcz/manager\nError from server (BadRequest): previous terminated container \"manager\" in pod \"orc-controller-manager-6cb597b5d4-glhcz\" not found\nrook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/rook-ceph-operator\nError from server (BadRequest): previous terminated container \"rook-ceph-operator\" in pod 
\"rook-ceph-operator-7b66cfb94c-tj94j\" not found\nsecretgen-controller/secretgen-controller-5cf976ccc7-szs5h/secretgen-controller\nError from server (BadRequest): previous terminated container \"secretgen-controller\" in pod \"secretgen-controller-5cf976ccc7-szs5h\" not found",
                            "stdout_lines": [
                                "auth-system/keycloak-0/keycloak",
                                "Error from server (BadRequest): previous terminated container \"keycloak\" in pod \"keycloak-0\" not found",
                                "capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/manager",
                                "Error from server (BadRequest): previous terminated container \"manager\" in pod \"capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k\" not found",
                                "capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/manager",
                                "Error from server (BadRequest): previous terminated container \"manager\" in pod \"capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4\" not found",
                                "capi-system/capi-controller-manager-bc4cf8c95-w8p6b/manager",
                                "Error from server (BadRequest): previous terminated container \"manager\" in pod \"capi-controller-manager-bc4cf8c95-w8p6b\" not found",
                                "capo-system/capo-controller-manager-6975759b4b-tkxrs/manager",
                                "Error from server (BadRequest): previous terminated container \"manager\" in pod \"capo-controller-manager-6975759b4b-tkxrs\" not found",
                                "cert-manager/cert-manager-75c4c745bc-45s4r/cert-manager-controller",
                                "Error from server (BadRequest): previous terminated container \"cert-manager-controller\" in pod \"cert-manager-75c4c745bc-45s4r\" not found",
                                "cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/cert-manager-cainjector",
                                "Error from server (BadRequest): previous terminated container \"cert-manager-cainjector\" in pod \"cert-manager-cainjector-64b59ddb75-tl5x7\" not found",
                                "cert-manager/cert-manager-webhook-548949fc64-vkrlt/cert-manager-webhook",
                                "Error from server (BadRequest): previous terminated container \"cert-manager-webhook\" in pod \"cert-manager-webhook-548949fc64-vkrlt\" not found",
                                "ingress-nginx/ingress-nginx-controller-j4bqv/controller",
                                "Error from server (BadRequest): previous terminated container \"controller\" in pod \"ingress-nginx-controller-j4bqv\" not found",
                                "ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/ingress-nginx-default-backend",
                                "Error from server (BadRequest): previous terminated container \"ingress-nginx-default-backend\" in pod \"ingress-nginx-defaultbackend-6987ff55cf-gpx4l\" not found",
                                "kube-system/cilium-operator-869df985b8-kszk2/cilium-operator",
                                "Error from server (BadRequest): previous terminated container \"cilium-operator\" in pod \"cilium-operator-869df985b8-kszk2\" not found",
                                "kube-system/cilium-vdz4f/config",
                                "Error from server (BadRequest): previous terminated container \"config\" in pod \"cilium-vdz4f\" not found",
                                "kube-system/cilium-vdz4f/mount-cgroup",
                                "Error from server (BadRequest): previous terminated container \"mount-cgroup\" in pod \"cilium-vdz4f\" not found",
                                "kube-system/cilium-vdz4f/apply-sysctl-overwrites",
                                "Error from server (BadRequest): previous terminated container \"apply-sysctl-overwrites\" in pod \"cilium-vdz4f\" not found",
                                "kube-system/cilium-vdz4f/mount-bpf-fs",
                                "Error from server (BadRequest): previous terminated container \"mount-bpf-fs\" in pod \"cilium-vdz4f\" not found",
                                "kube-system/cilium-vdz4f/clean-cilium-state",
                                "Error from server (BadRequest): previous terminated container \"clean-cilium-state\" in pod \"cilium-vdz4f\" not found",
                                "kube-system/cilium-vdz4f/install-cni-binaries",
                                "Error from server (BadRequest): previous terminated container \"install-cni-binaries\" in pod \"cilium-vdz4f\" not found",
                                "kube-system/cilium-vdz4f/cilium-agent",
                                "Error from server (BadRequest): previous terminated container \"cilium-agent\" in pod \"cilium-vdz4f\" not found",
                                "kube-system/coredns-67659f764b-6f2mm/coredns",
                                "Error from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-67659f764b-6f2mm\" not found",
                                "kube-system/coredns-67659f764b-j6fp4/coredns",
                                "Error from server (BadRequest): previous terminated container \"coredns\" in pod \"coredns-67659f764b-j6fp4\" not found",
                                "kube-system/etcd-instance/etcd",
                                "Error from server (BadRequest): previous terminated container \"etcd\" in pod \"etcd-instance\" not found",
                                "kube-system/kube-apiserver-instance/kube-apiserver",
                                "Error from server (BadRequest): previous terminated container \"kube-apiserver\" in pod \"kube-apiserver-instance\" not found",
                                "kube-system/kube-controller-manager-instance/kube-controller-manager",
                                "Error from server (BadRequest): previous terminated container \"kube-controller-manager\" in pod \"kube-controller-manager-instance\" not found",
                                "kube-system/kube-proxy-sp2vs/kube-proxy",
                                "Error from server (BadRequest): previous terminated container \"kube-proxy\" in pod \"kube-proxy-sp2vs\" not found",
                                "kube-system/kube-scheduler-instance/kube-scheduler",
                                "Error from server (BadRequest): previous terminated container \"kube-scheduler\" in pod \"kube-scheduler-instance\" not found",
                                "kube-system/kube-vip-instance/kube-vip",
                                "Error from server (BadRequest): previous terminated container \"kube-vip\" in pod \"kube-vip-instance\" not found",
                                "local-path-storage/local-path-provisioner-679c578f5-7h8w5/local-path-provisioner",
                                "Error from server (BadRequest): previous terminated container \"local-path-provisioner\" in pod \"local-path-provisioner-679c578f5-7h8w5\" not found",
                                "monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/init-config-reloader",
                                "Error from server (BadRequest): previous terminated container \"init-config-reloader\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found",
                                "monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/alertmanager",
                                "Error from server (BadRequest): previous terminated container \"alertmanager\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found",
                                "monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/config-reloader",
                                "Error from server (BadRequest): previous terminated container \"config-reloader\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found",
                                "monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/oauth2-proxy",
                                "Error from server (BadRequest): previous terminated container \"oauth2-proxy\" in pod \"alertmanager-kube-prometheus-stack-alertmanager-0\" not found",
                                "monitoring/goldpinger-7jzp8/goldpinger-daemon",
                                "Error from server (BadRequest): previous terminated container \"goldpinger-daemon\" in pod \"goldpinger-7jzp8\" not found",
                                "monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-dashboard",
                                "Error from server (BadRequest): previous terminated container \"grafana-sc-dashboard\" in pod \"kube-prometheus-stack-grafana-668bfb9659-ft52b\" not found",
                                "monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-datasources",
                                "Error from server (BadRequest): previous terminated container \"grafana-sc-datasources\" in pod \"kube-prometheus-stack-grafana-668bfb9659-ft52b\" not found",
                                "monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana",
                                "Error from server (BadRequest): previous terminated container \"grafana\" in pod \"kube-prometheus-stack-grafana-668bfb9659-ft52b\" not found",
                                "monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/kube-state-metrics",
                                "Error from server (BadRequest): previous terminated container \"kube-state-metrics\" in pod \"kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m\" not found",
                                "monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/kube-prometheus-stack",
                                "Error from server (BadRequest): previous terminated container \"kube-prometheus-stack\" in pod \"kube-prometheus-stack-operator-cd88cf4bf-lzh7g\" not found",
                                "monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/node-exporter",
                                "monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/pod-tls-sidecar",
                                "Error from server (BadRequest): previous terminated container \"pod-tls-sidecar\" in pod \"kube-prometheus-stack-prometheus-node-exporter-59qlm\" not found",
                                "monitoring/loki-0/loki",
                                "Error from server (BadRequest): previous terminated container \"loki\" in pod \"loki-0\" not found",
                                "monitoring/loki-chunks-cache-0/memcached",
                                "Error from server (BadRequest): previous terminated container \"memcached\" in pod \"loki-chunks-cache-0\" not found",
                                "monitoring/loki-chunks-cache-0/exporter",
                                "Error from server (BadRequest): previous terminated container \"exporter\" in pod \"loki-chunks-cache-0\" not found",
                                "monitoring/loki-gateway-cf54cb88c-zv654/nginx",
                                "Error from server (BadRequest): previous terminated container \"nginx\" in pod \"loki-gateway-cf54cb88c-zv654\" not found",
                                "monitoring/loki-results-cache-0/memcached",
                                "Error from server (BadRequest): previous terminated container \"memcached\" in pod \"loki-results-cache-0\" not found",
                                "monitoring/loki-results-cache-0/exporter",
                                "Error from server (BadRequest): previous terminated container \"exporter\" in pod \"loki-results-cache-0\" not found",
                                "monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/gc",
                                "Error from server (BadRequest): previous terminated container \"gc\" in pod \"node-feature-discovery-gc-6675cbb6d9-zv9sn\" not found",
                                "monitoring/node-feature-discovery-master-8665476dbc-t4z5z/master",
                                "Error from server (BadRequest): previous terminated container \"master\" in pod \"node-feature-discovery-master-8665476dbc-t4z5z\" not found",
                                "monitoring/node-feature-discovery-worker-p8lmk/worker",
                                "Error from server (BadRequest): previous terminated container \"worker\" in pod \"node-feature-discovery-worker-p8lmk\" not found",
                                "monitoring/prometheus-kube-prometheus-stack-prometheus-0/init-config-reloader",
                                "Error from server (BadRequest): previous terminated container \"init-config-reloader\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found",
                                "monitoring/prometheus-kube-prometheus-stack-prometheus-0/prometheus",
                                "Error from server (BadRequest): previous terminated container \"prometheus\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found",
                                "monitoring/prometheus-kube-prometheus-stack-prometheus-0/config-reloader",
                                "Error from server (BadRequest): previous terminated container \"config-reloader\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found",
                                "monitoring/prometheus-kube-prometheus-stack-prometheus-0/pod-tls-sidecar",
                                "Error from server (BadRequest): previous terminated container \"pod-tls-sidecar\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found",
                                "monitoring/prometheus-kube-prometheus-stack-prometheus-0/oauth2-proxy",
                                "Error from server (BadRequest): previous terminated container \"oauth2-proxy\" in pod \"prometheus-kube-prometheus-stack-prometheus-0\" not found",
                                "monitoring/prometheus-pushgateway-7b8659c68b-28dht/pushgateway",
                                "Error from server (BadRequest): previous terminated container \"pushgateway\" in pod \"prometheus-pushgateway-7b8659c68b-28dht\" not found",
                                "monitoring/vector-qzjms/vector",
                                "Error from server (BadRequest): previous terminated container \"vector\" in pod \"vector-qzjms\" not found",
                                "openstack/barbican-api-775987496d-z6jqv/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"barbican-api-775987496d-z6jqv\" not found",
                                "openstack/barbican-api-775987496d-z6jqv/barbican-api",
                                "Error from server (BadRequest): previous terminated container \"barbican-api\" in pod \"barbican-api-775987496d-z6jqv\" not found",
                                "openstack/barbican-db-init-nm8k6/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"barbican-db-init-nm8k6\" not found",
                                "openstack/barbican-db-init-nm8k6/barbican-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"barbican-db-init-0\" in pod \"barbican-db-init-nm8k6\" not found",
                                "openstack/barbican-db-sync-452x5/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"barbican-db-sync-452x5\" not found",
                                "openstack/barbican-db-sync-452x5/barbican-db-sync",
                                "Error from server (BadRequest): previous terminated container \"barbican-db-sync\" in pod \"barbican-db-sync-452x5\" not found",
                                "openstack/barbican-ks-endpoints-w2ffg/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"barbican-ks-endpoints-w2ffg\" not found",
                                "openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"key-manager-ks-endpoints-admin\" in pod \"barbican-ks-endpoints-w2ffg\" not found",
                                "openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"key-manager-ks-endpoints-internal\" in pod \"barbican-ks-endpoints-w2ffg\" not found",
                                "openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"key-manager-ks-endpoints-public\" in pod \"barbican-ks-endpoints-w2ffg\" not found",
                                "openstack/barbican-ks-service-8pm7j/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"barbican-ks-service-8pm7j\" not found",
                                "openstack/barbican-ks-service-8pm7j/key-manager-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"key-manager-ks-service-registration\" in pod \"barbican-ks-service-8pm7j\" not found",
                                "openstack/barbican-ks-user-fszfr/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"barbican-ks-user-fszfr\" not found",
                                "openstack/barbican-ks-user-fszfr/barbican-ks-user",
                                "Error from server (BadRequest): previous terminated container \"barbican-ks-user\" in pod \"barbican-ks-user-fszfr\" not found",
                                "openstack/barbican-rabbit-init-j5qmd/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"barbican-rabbit-init-j5qmd\" not found",
                                "openstack/barbican-rabbit-init-j5qmd/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"barbican-rabbit-init-j5qmd\" not found",
                                "openstack/cinder-api-86d7694f66-j97gj/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-api-86d7694f66-j97gj\" not found",
                                "openstack/cinder-api-86d7694f66-j97gj/ceph-coordination-volume-perms",
                                "Error from server (BadRequest): previous terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-api-86d7694f66-j97gj\" not found",
                                "openstack/cinder-api-86d7694f66-j97gj/cinder-api",
                                "Error from server (BadRequest): previous terminated container \"cinder-api\" in pod \"cinder-api-86d7694f66-j97gj\" not found",
                                "openstack/cinder-backup-dcfd7dfb7-sdwkc/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found",
                                "openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-backup-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-backup-keyring-placement\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found",
                                "openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-keyring-placement-rbd1",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement-rbd1\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found",
                                "openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-coordination-volume-perms",
                                "Error from server (BadRequest): previous terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found",
                                "openstack/cinder-backup-dcfd7dfb7-sdwkc/cinder-backup",
                                "Error from server (BadRequest): previous terminated container \"cinder-backup\" in pod \"cinder-backup-dcfd7dfb7-sdwkc\" not found",
                                "openstack/cinder-backup-storage-init-zmnkh/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-backup-storage-init-zmnkh\" not found",
                                "openstack/cinder-backup-storage-init-zmnkh/ceph-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"cinder-backup-storage-init-zmnkh\" not found",
                                "openstack/cinder-backup-storage-init-zmnkh/cinder-backup-storage-init",
                                "Error from server (BadRequest): previous terminated container \"cinder-backup-storage-init\" in pod \"cinder-backup-storage-init-zmnkh\" not found",
                                "openstack/cinder-bootstrap-wng86/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-bootstrap-wng86\" not found",
                                "openstack/cinder-bootstrap-wng86/bootstrap",
                                "Error from server (BadRequest): previous terminated container \"bootstrap\" in pod \"cinder-bootstrap-wng86\" not found",
                                "openstack/cinder-create-internal-tenant-6vgll/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-create-internal-tenant-6vgll\" not found",
                                "openstack/cinder-create-internal-tenant-6vgll/create-internal-tenant",
                                "Error from server (BadRequest): previous terminated container \"create-internal-tenant\" in pod \"cinder-create-internal-tenant-6vgll\" not found",
                                "openstack/cinder-db-init-mzm5b/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-db-init-mzm5b\" not found",
                                "openstack/cinder-db-init-mzm5b/cinder-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"cinder-db-init-0\" in pod \"cinder-db-init-mzm5b\" not found",
                                "openstack/cinder-db-sync-mz6ls/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-db-sync-mz6ls\" not found",
                                "openstack/cinder-db-sync-mz6ls/cinder-db-sync",
                                "Error from server (BadRequest): previous terminated container \"cinder-db-sync\" in pod \"cinder-db-sync-mz6ls\" not found",
                                "openstack/cinder-ks-endpoints-xv2tb/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-ks-endpoints-xv2tb\" not found",
                                "openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"volumev3-ks-endpoints-admin\" in pod \"cinder-ks-endpoints-xv2tb\" not found",
                                "openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"volumev3-ks-endpoints-internal\" in pod \"cinder-ks-endpoints-xv2tb\" not found",
                                "openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"volumev3-ks-endpoints-public\" in pod \"cinder-ks-endpoints-xv2tb\" not found",
                                "openstack/cinder-ks-service-dlcxz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-ks-service-dlcxz\" not found",
                                "openstack/cinder-ks-service-dlcxz/volumev3-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"volumev3-ks-service-registration\" in pod \"cinder-ks-service-dlcxz\" not found",
                                "openstack/cinder-ks-user-5bd5g/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-ks-user-5bd5g\" not found",
                                "openstack/cinder-ks-user-5bd5g/cinder-ks-user",
                                "Error from server (BadRequest): previous terminated container \"cinder-ks-user\" in pod \"cinder-ks-user-5bd5g\" not found",
                                "openstack/cinder-rabbit-init-l4fpm/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-rabbit-init-l4fpm\" not found",
                                "openstack/cinder-rabbit-init-l4fpm/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"cinder-rabbit-init-l4fpm\" not found",
                                "openstack/cinder-scheduler-586f444995-p7grf/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-scheduler-586f444995-p7grf\" not found",
                                "openstack/cinder-scheduler-586f444995-p7grf/ceph-coordination-volume-perms",
                                "Error from server (BadRequest): previous terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-scheduler-586f444995-p7grf\" not found",
                                "openstack/cinder-scheduler-586f444995-p7grf/cinder-scheduler",
                                "Error from server (BadRequest): previous terminated container \"cinder-scheduler\" in pod \"cinder-scheduler-586f444995-p7grf\" not found",
                                "openstack/cinder-storage-init-vt6br/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-storage-init-vt6br\" not found",
                                "openstack/cinder-storage-init-vt6br/ceph-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"cinder-storage-init-vt6br\" not found",
                                "openstack/cinder-storage-init-vt6br/cinder-storage-init-rbd1",
                                "Error from server (BadRequest): previous terminated container \"cinder-storage-init-rbd1\" in pod \"cinder-storage-init-vt6br\" not found",
                                "openstack/cinder-volume-66dc847979-qgp4l/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-volume-66dc847979-qgp4l\" not found",
                                "openstack/cinder-volume-66dc847979-qgp4l/ceph-keyring-placement-rbd1",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement-rbd1\" in pod \"cinder-volume-66dc847979-qgp4l\" not found",
                                "openstack/cinder-volume-66dc847979-qgp4l/ceph-coordination-volume-perms",
                                "Error from server (BadRequest): previous terminated container \"ceph-coordination-volume-perms\" in pod \"cinder-volume-66dc847979-qgp4l\" not found",
                                "openstack/cinder-volume-66dc847979-qgp4l/init-cinder-conf",
                                "Error from server (BadRequest): previous terminated container \"init-cinder-conf\" in pod \"cinder-volume-66dc847979-qgp4l\" not found",
                                "openstack/cinder-volume-66dc847979-qgp4l/cinder-volume",
                                "Error from server (BadRequest): previous terminated container \"cinder-volume\" in pod \"cinder-volume-66dc847979-qgp4l\" not found",
                                "openstack/cinder-volume-usage-audit-29540045-jbmvh/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"cinder-volume-usage-audit-29540045-jbmvh\" not found",
                                "openstack/cinder-volume-usage-audit-29540045-jbmvh/cinder-volume-usage-audit",
                                "Error from server (BadRequest): previous terminated container \"cinder-volume-usage-audit\" in pod \"cinder-volume-usage-audit-29540045-jbmvh\" not found",
                                "openstack/glance-api-65d579bfc8-6x76l/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-api-65d579bfc8-6x76l\" not found",
                                "openstack/glance-api-65d579bfc8-6x76l/glance-perms",
                                "Error from server (BadRequest): previous terminated container \"glance-perms\" in pod \"glance-api-65d579bfc8-6x76l\" not found",
                                "openstack/glance-api-65d579bfc8-6x76l/ceph-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"glance-api-65d579bfc8-6x76l\" not found",
                                "openstack/glance-api-65d579bfc8-6x76l/glance-api",
                                "Error from server (BadRequest): previous terminated container \"glance-api\" in pod \"glance-api-65d579bfc8-6x76l\" not found",
                                "openstack/glance-db-init-wbpff/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-db-init-wbpff\" not found",
                                "openstack/glance-db-init-wbpff/glance-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"glance-db-init-0\" in pod \"glance-db-init-wbpff\" not found",
                                "openstack/glance-db-sync-gk84f/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-db-sync-gk84f\" not found",
                                "openstack/glance-db-sync-gk84f/glance-db-sync",
                                "Error from server (BadRequest): previous terminated container \"glance-db-sync\" in pod \"glance-db-sync-gk84f\" not found",
                                "openstack/glance-ks-endpoints-dq2cc/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-ks-endpoints-dq2cc\" not found",
                                "openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"image-ks-endpoints-admin\" in pod \"glance-ks-endpoints-dq2cc\" not found",
                                "openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"image-ks-endpoints-internal\" in pod \"glance-ks-endpoints-dq2cc\" not found",
                                "openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"image-ks-endpoints-public\" in pod \"glance-ks-endpoints-dq2cc\" not found",
                                "openstack/glance-ks-service-5h6bw/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-ks-service-5h6bw\" not found",
                                "openstack/glance-ks-service-5h6bw/image-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"image-ks-service-registration\" in pod \"glance-ks-service-5h6bw\" not found",
                                "openstack/glance-ks-user-lcfxr/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-ks-user-lcfxr\" not found",
                                "openstack/glance-ks-user-lcfxr/glance-ks-user",
                                "Error from server (BadRequest): previous terminated container \"glance-ks-user\" in pod \"glance-ks-user-lcfxr\" not found",
                                "openstack/glance-metadefs-load-476tp/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-metadefs-load-476tp\" not found",
                                "openstack/glance-metadefs-load-476tp/glance-metadefs-load",
                                "Error from server (BadRequest): previous terminated container \"glance-metadefs-load\" in pod \"glance-metadefs-load-476tp\" not found",
                                "openstack/glance-rabbit-init-c6rjt/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-rabbit-init-c6rjt\" not found",
                                "openstack/glance-rabbit-init-c6rjt/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"glance-rabbit-init-c6rjt\" not found",
                                "openstack/glance-storage-init-hdcpc/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"glance-storage-init-hdcpc\" not found",
                                "openstack/glance-storage-init-hdcpc/ceph-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"glance-storage-init-hdcpc\" not found",
                                "openstack/glance-storage-init-hdcpc/glance-storage-init",
                                "Error from server (BadRequest): previous terminated container \"glance-storage-init\" in pod \"glance-storage-init-hdcpc\" not found",
                                "openstack/heat-api-6d65f9477-kmbkl/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-api-6d65f9477-kmbkl\" not found",
                                "openstack/heat-api-6d65f9477-kmbkl/heat-api",
                                "Error from server (BadRequest): previous terminated container \"heat-api\" in pod \"heat-api-6d65f9477-kmbkl\" not found",
                                "openstack/heat-bootstrap-9dwg2/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-bootstrap-9dwg2\" not found",
                                "openstack/heat-bootstrap-9dwg2/bootstrap",
                                "Error from server (BadRequest): previous terminated container \"bootstrap\" in pod \"heat-bootstrap-9dwg2\" not found",
                                "openstack/heat-cfn-f44db7787-t8f7m/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-cfn-f44db7787-t8f7m\" not found",
                                "openstack/heat-cfn-f44db7787-t8f7m/heat-cfn",
                                "Error from server (BadRequest): previous terminated container \"heat-cfn\" in pod \"heat-cfn-f44db7787-t8f7m\" not found",
                                "openstack/heat-db-init-fk8qw/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-db-init-fk8qw\" not found",
                                "openstack/heat-db-init-fk8qw/heat-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"heat-db-init-0\" in pod \"heat-db-init-fk8qw\" not found",
                                "openstack/heat-db-sync-cxmcb/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-db-sync-cxmcb\" not found",
                                "openstack/heat-db-sync-cxmcb/heat-db-sync",
                                "Error from server (BadRequest): previous terminated container \"heat-db-sync\" in pod \"heat-db-sync-cxmcb\" not found",
                                "openstack/heat-domain-ks-user-tq2c5/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-domain-ks-user-tq2c5\" not found",
                                "openstack/heat-domain-ks-user-tq2c5/heat-ks-domain-user",
                                "Error from server (BadRequest): previous terminated container \"heat-ks-domain-user\" in pod \"heat-domain-ks-user-tq2c5\" not found",
                                "openstack/heat-engine-64f8b77bfb-wngkr/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-64f8b77bfb-wngkr\" not found",
                                "openstack/heat-engine-64f8b77bfb-wngkr/heat-engine",
                                "Error from server (BadRequest): previous terminated container \"heat-engine\" in pod \"heat-engine-64f8b77bfb-wngkr\" not found",
                                "openstack/heat-engine-cleaner-29540055-p9pq9/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-cleaner-29540055-p9pq9\" not found",
                                "openstack/heat-engine-cleaner-29540055-p9pq9/heat-engine-cleaner",
                                "Error from server (BadRequest): previous terminated container \"heat-engine-cleaner\" in pod \"heat-engine-cleaner-29540055-p9pq9\" not found",
                                "openstack/heat-engine-cleaner-29540060-z4g95/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-cleaner-29540060-z4g95\" not found",
                                "openstack/heat-engine-cleaner-29540060-z4g95/heat-engine-cleaner",
                                "Error from server (BadRequest): previous terminated container \"heat-engine-cleaner\" in pod \"heat-engine-cleaner-29540060-z4g95\" not found",
                                "openstack/heat-engine-cleaner-29540065-rcjr2/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-engine-cleaner-29540065-rcjr2\" not found",
                                "openstack/heat-engine-cleaner-29540065-rcjr2/heat-engine-cleaner",
                                "Error from server (BadRequest): previous terminated container \"heat-engine-cleaner\" in pod \"heat-engine-cleaner-29540065-rcjr2\" not found",
                                "openstack/heat-ks-endpoints-wwzbz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-ks-endpoints-wwzbz\" not found",
                                "openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"orchestration-ks-endpoints-admin\" in pod \"heat-ks-endpoints-wwzbz\" not found",
                                "openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"orchestration-ks-endpoints-internal\" in pod \"heat-ks-endpoints-wwzbz\" not found",
                                "openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"orchestration-ks-endpoints-public\" in pod \"heat-ks-endpoints-wwzbz\" not found",
                                "openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"cloudformation-ks-endpoints-admin\" in pod \"heat-ks-endpoints-wwzbz\" not found",
                                "openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"cloudformation-ks-endpoints-internal\" in pod \"heat-ks-endpoints-wwzbz\" not found",
                                "openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"cloudformation-ks-endpoints-public\" in pod \"heat-ks-endpoints-wwzbz\" not found",
                                "openstack/heat-ks-service-8pxqz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-ks-service-8pxqz\" not found",
                                "openstack/heat-ks-service-8pxqz/orchestration-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"orchestration-ks-service-registration\" in pod \"heat-ks-service-8pxqz\" not found",
                                "openstack/heat-ks-service-8pxqz/cloudformation-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"cloudformation-ks-service-registration\" in pod \"heat-ks-service-8pxqz\" not found",
                                "openstack/heat-ks-user-tfk98/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-ks-user-tfk98\" not found",
                                "openstack/heat-ks-user-tfk98/heat-ks-user",
                                "Error from server (BadRequest): previous terminated container \"heat-ks-user\" in pod \"heat-ks-user-tfk98\" not found",
                                "openstack/heat-ks-user-tfk98/heat-trustee-ks-user",
                                "Error from server (BadRequest): previous terminated container \"heat-trustee-ks-user\" in pod \"heat-ks-user-tfk98\" not found",
                                "openstack/heat-rabbit-init-rbl9n/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-rabbit-init-rbl9n\" not found",
                                "openstack/heat-rabbit-init-rbl9n/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"heat-rabbit-init-rbl9n\" not found",
                                "openstack/heat-trusts-czrrv/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"heat-trusts-czrrv\" not found",
                                "openstack/heat-trusts-czrrv/heat-trusts",
                                "Error from server (BadRequest): previous terminated container \"heat-trusts\" in pod \"heat-trusts-czrrv\" not found",
                                "openstack/horizon-8cdd7b888-bvzvx/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"horizon-8cdd7b888-bvzvx\" not found",
                                "openstack/horizon-8cdd7b888-bvzvx/horizon",
                                "Error from server (BadRequest): previous terminated container \"horizon\" in pod \"horizon-8cdd7b888-bvzvx\" not found",
                                "openstack/horizon-db-init-s5pbw/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"horizon-db-init-s5pbw\" not found",
                                "openstack/horizon-db-init-s5pbw/horizon-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"horizon-db-init-0\" in pod \"horizon-db-init-s5pbw\" not found",
                                "openstack/horizon-db-sync-bgr2g/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"horizon-db-sync-bgr2g\" not found",
                                "openstack/horizon-db-sync-bgr2g/horizon-db-sync",
                                "Error from server (BadRequest): previous terminated container \"horizon-db-sync\" in pod \"horizon-db-sync-bgr2g\" not found",
                                "openstack/keepalived-7jdfz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keepalived-7jdfz\" not found",
                                "openstack/keepalived-7jdfz/wait-for-ip",
                                "Error from server (BadRequest): previous terminated container \"wait-for-ip\" in pod \"keepalived-7jdfz\" not found",
                                "openstack/keepalived-7jdfz/keepalived",
                                "Error from server (BadRequest): previous terminated container \"keepalived\" in pod \"keepalived-7jdfz\" not found",
                                "openstack/keystone-api-c4656754c-mqbxm/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-api-c4656754c-mqbxm\" not found",
                                "openstack/keystone-api-c4656754c-mqbxm/keystone-api",
                                "Error from server (BadRequest): previous terminated container \"keystone-api\" in pod \"keystone-api-c4656754c-mqbxm\" not found",
                                "openstack/keystone-bootstrap-mdtrx/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-bootstrap-mdtrx\" not found",
                                "openstack/keystone-bootstrap-mdtrx/bootstrap",
                                "Error from server (BadRequest): previous terminated container \"bootstrap\" in pod \"keystone-bootstrap-mdtrx\" not found",
                                "openstack/keystone-credential-setup-6xsvx/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-credential-setup-6xsvx\" not found",
                                "openstack/keystone-credential-setup-6xsvx/keystone-credential-setup",
                                "Error from server (BadRequest): previous terminated container \"keystone-credential-setup\" in pod \"keystone-credential-setup-6xsvx\" not found",
                                "openstack/keystone-db-init-z5mwz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-db-init-z5mwz\" not found",
                                "openstack/keystone-db-init-z5mwz/keystone-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"keystone-db-init-0\" in pod \"keystone-db-init-z5mwz\" not found",
                                "openstack/keystone-db-sync-zsq8z/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-db-sync-zsq8z\" not found",
                                "openstack/keystone-db-sync-zsq8z/keystone-db-sync",
                                "Error from server (BadRequest): previous terminated container \"keystone-db-sync\" in pod \"keystone-db-sync-zsq8z\" not found",
                                "openstack/keystone-domain-manage-v865d/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-domain-manage-v865d\" not found",
                                "openstack/keystone-domain-manage-v865d/keystone-domain-manage",
                                "Error from server (BadRequest): previous terminated container \"keystone-domain-manage\" in pod \"keystone-domain-manage-v865d\" not found",
                                "openstack/keystone-fernet-setup-5rfqs/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-fernet-setup-5rfqs\" not found",
                                "openstack/keystone-fernet-setup-5rfqs/keystone-fernet-setup",
                                "Error from server (BadRequest): previous terminated container \"keystone-fernet-setup\" in pod \"keystone-fernet-setup-5rfqs\" not found",
                                "openstack/keystone-rabbit-init-m44qz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"keystone-rabbit-init-m44qz\" not found",
                                "openstack/keystone-rabbit-init-m44qz/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"keystone-rabbit-init-m44qz\" not found",
                                "openstack/libvirt-libvirt-default-6bgrg/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"libvirt-libvirt-default-6bgrg\" not found",
                                "openstack/libvirt-libvirt-default-6bgrg/init-dynamic-options",
                                "Error from server (BadRequest): previous terminated container \"init-dynamic-options\" in pod \"libvirt-libvirt-default-6bgrg\" not found",
                                "openstack/libvirt-libvirt-default-6bgrg/ceph-admin-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-admin-keyring-placement\" in pod \"libvirt-libvirt-default-6bgrg\" not found",
                                "openstack/libvirt-libvirt-default-6bgrg/ceph-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"libvirt-libvirt-default-6bgrg\" not found",
                                "openstack/libvirt-libvirt-default-6bgrg/tls-sidecar",
                                "Error from server (BadRequest): previous terminated container \"tls-sidecar\" in pod \"libvirt-libvirt-default-6bgrg\" not found",
                                "openstack/libvirt-libvirt-default-6bgrg/libvirt",
                                "Error from server (BadRequest): previous terminated container \"libvirt\" in pod \"libvirt-libvirt-default-6bgrg\" not found",
                                "openstack/libvirt-libvirt-default-6bgrg/libvirt-exporter",
                                "Error from server (BadRequest): previous terminated container \"libvirt-exporter\" in pod \"libvirt-libvirt-default-6bgrg\" not found",
                                "openstack/magnum-api-8549df7884-9b2zc/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-api-8549df7884-9b2zc\" not found",
                                "openstack/magnum-api-8549df7884-9b2zc/magnum-api",
                                "Error from server (BadRequest): previous terminated container \"magnum-api\" in pod \"magnum-api-8549df7884-9b2zc\" not found",
                                "openstack/magnum-cluster-api-proxy-z2flh/magnum-cluster-api-proxy",
                                "Error from server (BadRequest): previous terminated container \"magnum-cluster-api-proxy\" in pod \"magnum-cluster-api-proxy-z2flh\" not found",
                                "openstack/magnum-conductor-0/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-conductor-0\" not found",
                                "openstack/magnum-conductor-0/magnum-conductor-init",
                                "Error from server (BadRequest): previous terminated container \"magnum-conductor-init\" in pod \"magnum-conductor-0\" not found",
                                "openstack/magnum-conductor-0/magnum-conductor",
                                "Error from server (BadRequest): previous terminated container \"magnum-conductor\" in pod \"magnum-conductor-0\" not found",
                                "openstack/magnum-db-init-dshrc/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-db-init-dshrc\" not found",
                                "openstack/magnum-db-init-dshrc/magnum-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"magnum-db-init-0\" in pod \"magnum-db-init-dshrc\" not found",
                                "openstack/magnum-db-sync-8ttpk/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-db-sync-8ttpk\" not found",
                                "openstack/magnum-db-sync-8ttpk/magnum-db-sync",
                                "Error from server (BadRequest): previous terminated container \"magnum-db-sync\" in pod \"magnum-db-sync-8ttpk\" not found",
                                "openstack/magnum-domain-ks-user-vp8f2/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-domain-ks-user-vp8f2\" not found",
                                "openstack/magnum-domain-ks-user-vp8f2/magnum-ks-domain-user",
                                "Error from server (BadRequest): previous terminated container \"magnum-ks-domain-user\" in pod \"magnum-domain-ks-user-vp8f2\" not found",
                                "openstack/magnum-ks-endpoints-jvzvf/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-ks-endpoints-jvzvf\" not found",
                                "openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"container-infra-ks-endpoints-admin\" in pod \"magnum-ks-endpoints-jvzvf\" not found",
                                "openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"container-infra-ks-endpoints-internal\" in pod \"magnum-ks-endpoints-jvzvf\" not found",
                                "openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"container-infra-ks-endpoints-public\" in pod \"magnum-ks-endpoints-jvzvf\" not found",
                                "openstack/magnum-ks-service-vdn67/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-ks-service-vdn67\" not found",
                                "openstack/magnum-ks-service-vdn67/container-infra-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"container-infra-ks-service-registration\" in pod \"magnum-ks-service-vdn67\" not found",
                                "openstack/magnum-ks-user-4wvtj/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-ks-user-4wvtj\" not found",
                                "openstack/magnum-ks-user-4wvtj/magnum-ks-user",
                                "Error from server (BadRequest): previous terminated container \"magnum-ks-user\" in pod \"magnum-ks-user-4wvtj\" not found",
                                "openstack/magnum-rabbit-init-w7jc7/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"magnum-rabbit-init-w7jc7\" not found",
                                "openstack/magnum-rabbit-init-w7jc7/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"magnum-rabbit-init-w7jc7\" not found",
                                "openstack/magnum-registry-c45778976-2zz96/registry",
                                "Error from server (BadRequest): previous terminated container \"registry\" in pod \"magnum-registry-c45778976-2zz96\" not found",
                                "openstack/manila-api-5cdf958bd9-hmbmb/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-api-5cdf958bd9-hmbmb\" not found",
                                "openstack/manila-api-5cdf958bd9-hmbmb/manila-api",
                                "Error from server (BadRequest): previous terminated container \"manila-api\" in pod \"manila-api-5cdf958bd9-hmbmb\" not found",
                                "openstack/manila-bootstrap-5wn97/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-bootstrap-5wn97\" not found",
                                "openstack/manila-bootstrap-5wn97/bootstrap",
                                "Error from server (BadRequest): previous terminated container \"bootstrap\" in pod \"manila-bootstrap-5wn97\" not found",
                                "openstack/manila-data-75cbc955bd-27jjw/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-data-75cbc955bd-27jjw\" not found",
                                "openstack/manila-data-75cbc955bd-27jjw/manila-data",
                                "Error from server (BadRequest): previous terminated container \"manila-data\" in pod \"manila-data-75cbc955bd-27jjw\" not found",
                                "openstack/manila-db-init-pbdm8/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-db-init-pbdm8\" not found",
                                "openstack/manila-db-init-pbdm8/manila-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"manila-db-init-0\" in pod \"manila-db-init-pbdm8\" not found",
                                "openstack/manila-db-sync-rm9mz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-db-sync-rm9mz\" not found",
                                "openstack/manila-db-sync-rm9mz/manila-db-sync",
                                "Error from server (BadRequest): previous terminated container \"manila-db-sync\" in pod \"manila-db-sync-rm9mz\" not found",
                                "openstack/manila-ks-endpoints-d8nr9/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-ks-endpoints-d8nr9\" not found",
                                "openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"share-ks-endpoints-admin\" in pod \"manila-ks-endpoints-d8nr9\" not found",
                                "openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"share-ks-endpoints-internal\" in pod \"manila-ks-endpoints-d8nr9\" not found",
                                "openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"share-ks-endpoints-public\" in pod \"manila-ks-endpoints-d8nr9\" not found",
                                "openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"sharev2-ks-endpoints-admin\" in pod \"manila-ks-endpoints-d8nr9\" not found",
                                "openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"sharev2-ks-endpoints-internal\" in pod \"manila-ks-endpoints-d8nr9\" not found",
                                "openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"sharev2-ks-endpoints-public\" in pod \"manila-ks-endpoints-d8nr9\" not found",
                                "openstack/manila-ks-service-g7svt/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-ks-service-g7svt\" not found",
                                "openstack/manila-ks-service-g7svt/share-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"share-ks-service-registration\" in pod \"manila-ks-service-g7svt\" not found",
                                "openstack/manila-ks-service-g7svt/sharev2-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"sharev2-ks-service-registration\" in pod \"manila-ks-service-g7svt\" not found",
                                "openstack/manila-ks-user-pr9mg/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-ks-user-pr9mg\" not found",
                                "openstack/manila-ks-user-pr9mg/manila-ks-user",
                                "Error from server (BadRequest): previous terminated container \"manila-ks-user\" in pod \"manila-ks-user-pr9mg\" not found",
                                "openstack/manila-rabbit-init-74vjs/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-rabbit-init-74vjs\" not found",
                                "openstack/manila-rabbit-init-74vjs/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"manila-rabbit-init-74vjs\" not found",
                                "openstack/manila-scheduler-5b584c8656-mmnnd/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-scheduler-5b584c8656-mmnnd\" not found",
                                "openstack/manila-scheduler-5b584c8656-mmnnd/manila-scheduler",
                                "Error from server (BadRequest): previous terminated container \"manila-scheduler\" in pod \"manila-scheduler-5b584c8656-mmnnd\" not found",
                                "openstack/manila-share-68879775b-rc6q9/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"manila-share-68879775b-rc6q9\" not found",
                                "openstack/manila-share-68879775b-rc6q9/manila-share-init",
                                "Error from server (BadRequest): previous terminated container \"manila-share-init\" in pod \"manila-share-68879775b-rc6q9\" not found",
                                "openstack/manila-share-68879775b-rc6q9/manila-share",
                                "Error from server (BadRequest): previous terminated container \"manila-share\" in pod \"manila-share-68879775b-rc6q9\" not found",
                                "openstack/memcached-memcached-6479589586-9sxjx/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"memcached-memcached-6479589586-9sxjx\" not found",
                                "openstack/memcached-memcached-6479589586-9sxjx/memcached",
                                "Error from server (BadRequest): previous terminated container \"memcached\" in pod \"memcached-memcached-6479589586-9sxjx\" not found",
                                "openstack/memcached-memcached-6479589586-9sxjx/memcached-exporter",
                                "Error from server (BadRequest): previous terminated container \"memcached-exporter\" in pod \"memcached-memcached-6479589586-9sxjx\" not found",
                                "openstack/neutron-db-init-l7c9v/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-db-init-l7c9v\" not found",
                                "openstack/neutron-db-init-l7c9v/neutron-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"neutron-db-init-0\" in pod \"neutron-db-init-l7c9v\" not found",
                                "openstack/neutron-db-sync-brwb5/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-db-sync-brwb5\" not found",
                                "openstack/neutron-db-sync-brwb5/neutron-db-sync",
                                "Error from server (BadRequest): previous terminated container \"neutron-db-sync\" in pod \"neutron-db-sync-brwb5\" not found",
                                "openstack/neutron-ks-endpoints-dstkg/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-ks-endpoints-dstkg\" not found",
                                "openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"network-ks-endpoints-admin\" in pod \"neutron-ks-endpoints-dstkg\" not found",
                                "openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"network-ks-endpoints-internal\" in pod \"neutron-ks-endpoints-dstkg\" not found",
                                "openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"network-ks-endpoints-public\" in pod \"neutron-ks-endpoints-dstkg\" not found",
                                "openstack/neutron-ks-service-sq4tp/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-ks-service-sq4tp\" not found",
                                "openstack/neutron-ks-service-sq4tp/network-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"network-ks-service-registration\" in pod \"neutron-ks-service-sq4tp\" not found",
                                "openstack/neutron-ks-user-kcfc4/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-ks-user-kcfc4\" not found",
                                "openstack/neutron-ks-user-kcfc4/neutron-ks-user",
                                "Error from server (BadRequest): previous terminated container \"neutron-ks-user\" in pod \"neutron-ks-user-kcfc4\" not found",
                                "openstack/neutron-netns-cleanup-cron-default-8frwf/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-netns-cleanup-cron-default-8frwf\" not found",
                                "openstack/neutron-netns-cleanup-cron-default-8frwf/neutron-netns-cleanup-cron",
                                "Error from server (BadRequest): previous terminated container \"neutron-netns-cleanup-cron\" in pod \"neutron-netns-cleanup-cron-default-8frwf\" not found",
                                "openstack/neutron-ovn-metadata-agent-default-flhb5/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found",
                                "openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-metadata-agent-init",
                                "Error from server (BadRequest): previous terminated container \"neutron-metadata-agent-init\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found",
                                "openstack/neutron-ovn-metadata-agent-default-flhb5/ovn-neutron-init",
                                "Error from server (BadRequest): previous terminated container \"ovn-neutron-init\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found",
                                "openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-ovn-metadata-agent",
                                "Error from server (BadRequest): previous terminated container \"neutron-ovn-metadata-agent\" in pod \"neutron-ovn-metadata-agent-default-flhb5\" not found",
                                "openstack/neutron-rabbit-init-rdnbf/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-rabbit-init-rdnbf\" not found",
                                "openstack/neutron-rabbit-init-rdnbf/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"neutron-rabbit-init-rdnbf\" not found",
                                "openstack/neutron-server-649c5974f6-5dkvl/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"neutron-server-649c5974f6-5dkvl\" not found",
                                "openstack/neutron-server-649c5974f6-5dkvl/ovn-neutron-init",
                                "Error from server (BadRequest): previous terminated container \"ovn-neutron-init\" in pod \"neutron-server-649c5974f6-5dkvl\" not found",
                                "openstack/neutron-server-649c5974f6-5dkvl/neutron-server",
                                "Error from server (BadRequest): previous terminated container \"neutron-server\" in pod \"neutron-server-649c5974f6-5dkvl\" not found",
                                "openstack/neutron-server-649c5974f6-5dkvl/neutron-policy-server",
                                "Error from server (BadRequest): previous terminated container \"neutron-policy-server\" in pod \"neutron-server-649c5974f6-5dkvl\" not found",
                                "openstack/nova-api-metadata-546d94ddd7-btnrc/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-api-metadata-546d94ddd7-btnrc\" not found",
                                "openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api-metadata-init",
                                "Error from server (BadRequest): previous terminated container \"nova-api-metadata-init\" in pod \"nova-api-metadata-546d94ddd7-btnrc\" not found",
                                "openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api",
                                "Error from server (BadRequest): previous terminated container \"nova-api\" in pod \"nova-api-metadata-546d94ddd7-btnrc\" not found",
                                "openstack/nova-api-osapi-99c7b7cd8-2lnzr/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-api-osapi-99c7b7cd8-2lnzr\" not found",
                                "openstack/nova-api-osapi-99c7b7cd8-2lnzr/nova-osapi",
                                "Error from server (BadRequest): previous terminated container \"nova-osapi\" in pod \"nova-api-osapi-99c7b7cd8-2lnzr\" not found",
                                "openstack/nova-bootstrap-trzqq/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-bootstrap-trzqq\" not found",
                                "openstack/nova-bootstrap-trzqq/bootstrap",
                                "Error from server (BadRequest): previous terminated container \"bootstrap\" in pod \"nova-bootstrap-trzqq\" not found",
                                "openstack/nova-cell-setup-29540040-rtzd7/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-cell-setup-29540040-rtzd7\" not found",
                                "openstack/nova-cell-setup-29540040-rtzd7/nova-cell-setup",
                                "Error from server (BadRequest): previous terminated container \"nova-cell-setup\" in pod \"nova-cell-setup-29540040-rtzd7\" not found",
                                "openstack/nova-cell-setup-j97qh/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-cell-setup-j97qh\" not found",
                                "openstack/nova-cell-setup-j97qh/nova-cell-setup-init",
                                "Error from server (BadRequest): previous terminated container \"nova-cell-setup-init\" in pod \"nova-cell-setup-j97qh\" not found",
                                "openstack/nova-cell-setup-j97qh/nova-cell-setup",
                                "Error from server (BadRequest): previous terminated container \"nova-cell-setup\" in pod \"nova-cell-setup-j97qh\" not found",
                                "openstack/nova-compute-default-2v5pd/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/nova-compute-init",
                                "Error from server (BadRequest): previous terminated container \"nova-compute-init\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/ceph-perms",
                                "Error from server (BadRequest): previous terminated container \"ceph-perms\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/ceph-admin-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-admin-keyring-placement\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/ceph-keyring-placement",
                                "Error from server (BadRequest): previous terminated container \"ceph-keyring-placement\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/nova-compute-vnc-init",
                                "Error from server (BadRequest): previous terminated container \"nova-compute-vnc-init\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/nova-compute-ssh-init",
                                "Error from server (BadRequest): previous terminated container \"nova-compute-ssh-init\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/nova-compute",
                                "Error from server (BadRequest): previous terminated container \"nova-compute\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-compute-default-2v5pd/nova-compute-ssh",
                                "Error from server (BadRequest): previous terminated container \"nova-compute-ssh\" in pod \"nova-compute-default-2v5pd\" not found",
                                "openstack/nova-conductor-5474cb4b8d-bxzhq/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-conductor-5474cb4b8d-bxzhq\" not found",
                                "openstack/nova-conductor-5474cb4b8d-bxzhq/nova-conductor",
                                "Error from server (BadRequest): previous terminated container \"nova-conductor\" in pod \"nova-conductor-5474cb4b8d-bxzhq\" not found",
                                "openstack/nova-db-init-b4sqh/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-db-init-b4sqh\" not found",
                                "openstack/nova-db-init-b4sqh/nova-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"nova-db-init-0\" in pod \"nova-db-init-b4sqh\" not found",
                                "openstack/nova-db-init-b4sqh/nova-db-init-1",
                                "Error from server (BadRequest): previous terminated container \"nova-db-init-1\" in pod \"nova-db-init-b4sqh\" not found",
                                "openstack/nova-db-init-b4sqh/nova-db-init-2",
                                "Error from server (BadRequest): previous terminated container \"nova-db-init-2\" in pod \"nova-db-init-b4sqh\" not found",
                                "openstack/nova-db-sync-2rbjc/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-db-sync-2rbjc\" not found",
                                "openstack/nova-db-sync-2rbjc/nova-db-sync",
                                "Error from server (BadRequest): previous terminated container \"nova-db-sync\" in pod \"nova-db-sync-2rbjc\" not found",
                                "openstack/nova-ks-endpoints-zwcm6/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-ks-endpoints-zwcm6\" not found",
                                "openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"compute-ks-endpoints-admin\" in pod \"nova-ks-endpoints-zwcm6\" not found",
                                "openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"compute-ks-endpoints-internal\" in pod \"nova-ks-endpoints-zwcm6\" not found",
                                "openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"compute-ks-endpoints-public\" in pod \"nova-ks-endpoints-zwcm6\" not found",
                                "openstack/nova-ks-service-fmj77/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-ks-service-fmj77\" not found",
                                "openstack/nova-ks-service-fmj77/compute-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"compute-ks-service-registration\" in pod \"nova-ks-service-fmj77\" not found",
                                "openstack/nova-ks-user-t8xgz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-ks-user-t8xgz\" not found",
                                "openstack/nova-ks-user-t8xgz/nova-ks-user",
                                "Error from server (BadRequest): previous terminated container \"nova-ks-user\" in pod \"nova-ks-user-t8xgz\" not found",
                                "openstack/nova-novncproxy-85dd5b5965-z6hmj/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found",
                                "openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init",
                                "Error from server (BadRequest): previous terminated container \"nova-novncproxy-init\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found",
                                "openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init-assets",
                                "Error from server (BadRequest): previous terminated container \"nova-novncproxy-init-assets\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found",
                                "openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy",
                                "Error from server (BadRequest): previous terminated container \"nova-novncproxy\" in pod \"nova-novncproxy-85dd5b5965-z6hmj\" not found",
                                "openstack/nova-rabbit-init-szpvx/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-rabbit-init-szpvx\" not found",
                                "openstack/nova-rabbit-init-szpvx/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"nova-rabbit-init-szpvx\" not found",
                                "openstack/nova-scheduler-78775555d4-hb2j9/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-scheduler-78775555d4-hb2j9\" not found",
                                "openstack/nova-scheduler-78775555d4-hb2j9/nova-scheduler",
                                "Error from server (BadRequest): previous terminated container \"nova-scheduler\" in pod \"nova-scheduler-78775555d4-hb2j9\" not found",
                                "openstack/nova-service-cleaner-29540040-cxdd4/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"nova-service-cleaner-29540040-cxdd4\" not found",
                                "openstack/nova-service-cleaner-29540040-cxdd4/nova-service-cleaner",
                                "Error from server (BadRequest): previous terminated container \"nova-service-cleaner\" in pod \"nova-service-cleaner-29540040-cxdd4\" not found",
                                "openstack/octavia-api-75db6578cf-m656r/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-api-75db6578cf-m656r\" not found",
                                "openstack/octavia-api-75db6578cf-m656r/octavia-api",
                                "Error from server (BadRequest): previous terminated container \"octavia-api\" in pod \"octavia-api-75db6578cf-m656r\" not found",
                                "openstack/octavia-bootstrap-kwfv2/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-bootstrap-kwfv2\" not found",
                                "openstack/octavia-bootstrap-kwfv2/bootstrap",
                                "Error from server (BadRequest): previous terminated container \"bootstrap\" in pod \"octavia-bootstrap-kwfv2\" not found",
                                "openstack/octavia-db-init-wnz5h/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-db-init-wnz5h\" not found",
                                "openstack/octavia-db-init-wnz5h/octavia-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"octavia-db-init-0\" in pod \"octavia-db-init-wnz5h\" not found",
                                "openstack/octavia-db-init-wnz5h/octavia-db-init-1",
                                "Error from server (BadRequest): previous terminated container \"octavia-db-init-1\" in pod \"octavia-db-init-wnz5h\" not found",
                                "openstack/octavia-db-sync-rjq45/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-db-sync-rjq45\" not found",
                                "openstack/octavia-db-sync-rjq45/octavia-db-sync",
                                "Error from server (BadRequest): previous terminated container \"octavia-db-sync\" in pod \"octavia-db-sync-rjq45\" not found",
                                "openstack/octavia-health-manager-default-twmks/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-health-manager-default-twmks\" not found",
                                "openstack/octavia-health-manager-default-twmks/octavia-health-manager-get-port",
                                "Error from server (BadRequest): previous terminated container \"octavia-health-manager-get-port\" in pod \"octavia-health-manager-default-twmks\" not found",
                                "openstack/octavia-health-manager-default-twmks/octavia-health-manager-nic-init",
                                "Error from server (BadRequest): previous terminated container \"octavia-health-manager-nic-init\" in pod \"octavia-health-manager-default-twmks\" not found",
                                "openstack/octavia-health-manager-default-twmks/octavia-health-manager",
                                "Error from server (BadRequest): previous terminated container \"octavia-health-manager\" in pod \"octavia-health-manager-default-twmks\" not found",
                                "openstack/octavia-housekeeping-87b98c47b-vqwct/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-housekeeping-87b98c47b-vqwct\" not found",
                                "openstack/octavia-housekeeping-87b98c47b-vqwct/octavia-housekeeping",
                                "Error from server (BadRequest): previous terminated container \"octavia-housekeeping\" in pod \"octavia-housekeeping-87b98c47b-vqwct\" not found",
                                "openstack/octavia-ks-endpoints-jdlzw/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-ks-endpoints-jdlzw\" not found",
                                "openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"load-balancer-ks-endpoints-admin\" in pod \"octavia-ks-endpoints-jdlzw\" not found",
                                "openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"load-balancer-ks-endpoints-internal\" in pod \"octavia-ks-endpoints-jdlzw\" not found",
                                "openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"load-balancer-ks-endpoints-public\" in pod \"octavia-ks-endpoints-jdlzw\" not found",
                                "openstack/octavia-ks-service-rkdp9/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-ks-service-rkdp9\" not found",
                                "openstack/octavia-ks-service-rkdp9/load-balancer-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"load-balancer-ks-service-registration\" in pod \"octavia-ks-service-rkdp9\" not found",
                                "openstack/octavia-ks-user-tjl52/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-ks-user-tjl52\" not found",
                                "openstack/octavia-ks-user-tjl52/octavia-ks-user",
                                "Error from server (BadRequest): previous terminated container \"octavia-ks-user\" in pod \"octavia-ks-user-tjl52\" not found",
                                "openstack/octavia-rabbit-init-vdqxf/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-rabbit-init-vdqxf\" not found",
                                "openstack/octavia-rabbit-init-vdqxf/rabbit-init",
                                "Error from server (BadRequest): previous terminated container \"rabbit-init\" in pod \"octavia-rabbit-init-vdqxf\" not found",
                                "openstack/octavia-worker-774cddbcdc-qxl6k/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"octavia-worker-774cddbcdc-qxl6k\" not found",
                                "openstack/octavia-worker-774cddbcdc-qxl6k/octavia-worker",
                                "Error from server (BadRequest): previous terminated container \"octavia-worker\" in pod \"octavia-worker-774cddbcdc-qxl6k\" not found",
                                "openstack/openstack-database-exporter-7c944bc9f-w2bdb/openstack-database-exporter",
                                "Error from server (BadRequest): previous terminated container \"openstack-database-exporter\" in pod \"openstack-database-exporter-7c944bc9f-w2bdb\" not found",
                                "openstack/openstack-exporter-74676fb4b4-jrkwh/build-config",
                                "Error from server (BadRequest): previous terminated container \"build-config\" in pod \"openstack-exporter-74676fb4b4-jrkwh\" not found",
                                "openstack/openstack-exporter-74676fb4b4-jrkwh/openstack-exporter",
                                "Error from server (BadRequest): previous terminated container \"openstack-exporter\" in pod \"openstack-exporter-74676fb4b4-jrkwh\" not found",
                                "openstack/openvswitch-gj98d/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"openvswitch-gj98d\" not found",
                                "openstack/openvswitch-gj98d/openvswitch-db-perms",
                                "Error from server (BadRequest): previous terminated container \"openvswitch-db-perms\" in pod \"openvswitch-gj98d\" not found",
                                "openstack/openvswitch-gj98d/openvswitch-vswitchd-modules",
                                "Error from server (BadRequest): previous terminated container \"openvswitch-vswitchd-modules\" in pod \"openvswitch-gj98d\" not found",
                                "openstack/openvswitch-gj98d/openvswitch-db",
                                "Error from server (BadRequest): previous terminated container \"openvswitch-db\" in pod \"openvswitch-gj98d\" not found",
                                "openstack/openvswitch-gj98d/openvswitch-vswitchd",
                                "Error from server (BadRequest): previous terminated container \"openvswitch-vswitchd\" in pod \"openvswitch-gj98d\" not found",
                                "openstack/ovn-controller-6mbd4/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"ovn-controller-6mbd4\" not found",
                                "openstack/ovn-controller-6mbd4/get-gw-enabled",
                                "Error from server (BadRequest): previous terminated container \"get-gw-enabled\" in pod \"ovn-controller-6mbd4\" not found",
                                "openstack/ovn-controller-6mbd4/controller-init",
                                "Error from server (BadRequest): previous terminated container \"controller-init\" in pod \"ovn-controller-6mbd4\" not found",
                                "openstack/ovn-controller-6mbd4/controller",
                                "Error from server (BadRequest): previous terminated container \"controller\" in pod \"ovn-controller-6mbd4\" not found",
                                "openstack/ovn-controller-6mbd4/vector",
                                "Error from server (BadRequest): previous terminated container \"vector\" in pod \"ovn-controller-6mbd4\" not found",
                                "openstack/ovn-controller-6mbd4/log-parser",
                                "Error from server (BadRequest): previous terminated container \"log-parser\" in pod \"ovn-controller-6mbd4\" not found",
                                "openstack/ovn-northd-6c6687ddd6-7grhs/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"ovn-northd-6c6687ddd6-7grhs\" not found",
                                "openstack/ovn-northd-6c6687ddd6-7grhs/northd",
                                "Error from server (BadRequest): previous terminated container \"northd\" in pod \"ovn-northd-6c6687ddd6-7grhs\" not found",
                                "openstack/ovn-ovsdb-nb-0/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"ovn-ovsdb-nb-0\" not found",
                                "openstack/ovn-ovsdb-nb-0/ovsdb",
                                "Error from server (BadRequest): previous terminated container \"ovsdb\" in pod \"ovn-ovsdb-nb-0\" not found",
                                "openstack/ovn-ovsdb-sb-0/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"ovn-ovsdb-sb-0\" not found",
                                "openstack/ovn-ovsdb-sb-0/ovsdb",
                                "Error from server (BadRequest): previous terminated container \"ovsdb\" in pod \"ovn-ovsdb-sb-0\" not found",
                                "openstack/percona-xtradb-haproxy-0/pxc-init",
                                "Error from server (BadRequest): previous terminated container \"pxc-init\" in pod \"percona-xtradb-haproxy-0\" not found",
                                "openstack/percona-xtradb-haproxy-0/haproxy-init",
                                "Error from server (BadRequest): previous terminated container \"haproxy-init\" in pod \"percona-xtradb-haproxy-0\" not found",
                                "openstack/percona-xtradb-haproxy-0/haproxy",
                                "Error from server (BadRequest): previous terminated container \"haproxy\" in pod \"percona-xtradb-haproxy-0\" not found",
                                "openstack/percona-xtradb-haproxy-0/pxc-monit",
                                "Error from server (BadRequest): previous terminated container \"pxc-monit\" in pod \"percona-xtradb-haproxy-0\" not found",
                                "openstack/percona-xtradb-pxc-0/pxc-init",
                                "Error from server (BadRequest): previous terminated container \"pxc-init\" in pod \"percona-xtradb-pxc-0\" not found",
                                "openstack/percona-xtradb-pxc-0/pxc",
                                "Error from server (BadRequest): previous terminated container \"pxc\" in pod \"percona-xtradb-pxc-0\" not found",
                                "openstack/percona-xtradb-pxc-0/exporter",
                                "Error from server (BadRequest): previous terminated container \"exporter\" in pod \"percona-xtradb-pxc-0\" not found",
                                "openstack/placement-api-75695696c6-brsxj/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"placement-api-75695696c6-brsxj\" not found",
                                "openstack/placement-api-75695696c6-brsxj/placement-api",
                                "Error from server (BadRequest): previous terminated container \"placement-api\" in pod \"placement-api-75695696c6-brsxj\" not found",
                                "openstack/placement-db-init-89t92/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"placement-db-init-89t92\" not found",
                                "openstack/placement-db-init-89t92/placement-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"placement-db-init-0\" in pod \"placement-db-init-89t92\" not found",
                                "openstack/placement-db-sync-nvqjv/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"placement-db-sync-nvqjv\" not found",
                                "openstack/placement-db-sync-nvqjv/placement-db-sync",
                                "Error from server (BadRequest): previous terminated container \"placement-db-sync\" in pod \"placement-db-sync-nvqjv\" not found",
                                "openstack/placement-ks-endpoints-jmfl7/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"placement-ks-endpoints-jmfl7\" not found",
                                "openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-admin",
                                "Error from server (BadRequest): previous terminated container \"placement-ks-endpoints-admin\" in pod \"placement-ks-endpoints-jmfl7\" not found",
                                "openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-internal",
                                "Error from server (BadRequest): previous terminated container \"placement-ks-endpoints-internal\" in pod \"placement-ks-endpoints-jmfl7\" not found",
                                "openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-public",
                                "Error from server (BadRequest): previous terminated container \"placement-ks-endpoints-public\" in pod \"placement-ks-endpoints-jmfl7\" not found",
                                "openstack/placement-ks-service-qdjdz/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"placement-ks-service-qdjdz\" not found",
                                "openstack/placement-ks-service-qdjdz/placement-ks-service-registration",
                                "Error from server (BadRequest): previous terminated container \"placement-ks-service-registration\" in pod \"placement-ks-service-qdjdz\" not found",
                                "openstack/placement-ks-user-blkn9/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"placement-ks-user-blkn9\" not found",
                                "openstack/placement-ks-user-blkn9/placement-ks-user",
                                "Error from server (BadRequest): previous terminated container \"placement-ks-user\" in pod \"placement-ks-user-blkn9\" not found",
                                "openstack/pxc-operator-7cff949c8b-7zp4j/percona-xtradb-cluster-operator",
                                "Error from server (BadRequest): previous terminated container \"percona-xtradb-cluster-operator\" in pod \"pxc-operator-7cff949c8b-7zp4j\" not found",
                                "openstack/rabbitmq-barbican-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-barbican-server-0\" not found",
                                "openstack/rabbitmq-barbican-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-barbican-server-0\" not found",
                                "openstack/rabbitmq-cinder-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-cinder-server-0\" not found",
                                "openstack/rabbitmq-cinder-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-cinder-server-0\" not found",
                                "openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/rabbitmq-cluster-operator",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq-cluster-operator\" in pod \"rabbitmq-cluster-operator-5448d56d95-vk9km\" not found",
                                "openstack/rabbitmq-glance-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-glance-server-0\" not found",
                                "openstack/rabbitmq-glance-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-glance-server-0\" not found",
                                "openstack/rabbitmq-heat-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-heat-server-0\" not found",
                                "openstack/rabbitmq-heat-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-heat-server-0\" not found",
                                "openstack/rabbitmq-keystone-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-keystone-server-0\" not found",
                                "openstack/rabbitmq-keystone-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-keystone-server-0\" not found",
                                "openstack/rabbitmq-magnum-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-magnum-server-0\" not found",
                                "openstack/rabbitmq-magnum-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-magnum-server-0\" not found",
                                "openstack/rabbitmq-manila-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-manila-server-0\" not found",
                                "openstack/rabbitmq-manila-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-manila-server-0\" not found",
                                "openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/rabbitmq-cluster-operator",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq-cluster-operator\" in pod \"rabbitmq-messaging-topology-operator-7f8596f788-84l9x\" not found",
                                "openstack/rabbitmq-neutron-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-neutron-server-0\" not found",
                                "openstack/rabbitmq-neutron-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-neutron-server-0\" not found",
                                "openstack/rabbitmq-nova-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-nova-server-0\" not found",
                                "openstack/rabbitmq-nova-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-nova-server-0\" not found",
                                "openstack/rabbitmq-octavia-server-0/setup-container",
                                "Error from server (BadRequest): previous terminated container \"setup-container\" in pod \"rabbitmq-octavia-server-0\" not found",
                                "openstack/rabbitmq-octavia-server-0/rabbitmq",
                                "Error from server (BadRequest): previous terminated container \"rabbitmq\" in pod \"rabbitmq-octavia-server-0\" not found",
                                "openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/make-container-crash-dir",
                                "Error from server (BadRequest): previous terminated container \"make-container-crash-dir\" in pod \"rook-ceph-crashcollector-instance-754c646bfd-htxl9\" not found",
                                "openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/chown-container-data-dir",
                                "Error from server (BadRequest): previous terminated container \"chown-container-data-dir\" in pod \"rook-ceph-crashcollector-instance-754c646bfd-htxl9\" not found",
                                "openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/ceph-crash",
                                "Error from server (BadRequest): previous terminated container \"ceph-crash\" in pod \"rook-ceph-crashcollector-instance-754c646bfd-htxl9\" not found",
                                "openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/chown-container-data-dir",
                                "Error from server (BadRequest): previous terminated container \"chown-container-data-dir\" in pod \"rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw\" not found",
                                "openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/rgw",
                                "Error from server (BadRequest): previous terminated container \"rgw\" in pod \"rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw\" not found",
                                "openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/log-collector",
                                "Error from server (BadRequest): previous terminated container \"log-collector\" in pod \"rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw\" not found",
                                "openstack/staffeln-api-6669c8779f-qgp4c/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"staffeln-api-6669c8779f-qgp4c\" not found",
                                "openstack/staffeln-api-6669c8779f-qgp4c/staffeln-api",
                                "Error from server (BadRequest): previous terminated container \"staffeln-api\" in pod \"staffeln-api-6669c8779f-qgp4c\" not found",
                                "openstack/staffeln-conductor-7b5d99bcd4-ws4sl/staffeln-conductor",
                                "Error from server (BadRequest): previous terminated container \"staffeln-conductor\" in pod \"staffeln-conductor-7b5d99bcd4-ws4sl\" not found",
                                "openstack/staffeln-db-init-p4pq4/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"staffeln-db-init-p4pq4\" not found",
                                "openstack/staffeln-db-init-p4pq4/staffeln-db-init-0",
                                "Error from server (BadRequest): previous terminated container \"staffeln-db-init-0\" in pod \"staffeln-db-init-p4pq4\" not found",
                                "openstack/staffeln-db-sync-khzx8/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"staffeln-db-sync-khzx8\" not found",
                                "openstack/staffeln-db-sync-khzx8/staffeln-db-sync",
                                "Error from server (BadRequest): previous terminated container \"staffeln-db-sync\" in pod \"staffeln-db-sync-khzx8\" not found",
                                "openstack/tempest-ks-user-kwbf6/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"tempest-ks-user-kwbf6\" not found",
                                "openstack/tempest-ks-user-kwbf6/tempest-ks-user",
                                "Error from server (BadRequest): previous terminated container \"tempest-ks-user\" in pod \"tempest-ks-user-kwbf6\" not found",
                                "openstack/tempest-run-tests-g5plh/init",
                                "Error from server (BadRequest): previous terminated container \"init\" in pod \"tempest-run-tests-g5plh\" not found",
                                "openstack/tempest-run-tests-g5plh/tempest-run-tests-init",
                                "Error from server (BadRequest): previous terminated container \"tempest-run-tests-init\" in pod \"tempest-run-tests-g5plh\" not found",
                                "openstack/tempest-run-tests-g5plh/tempest-run-tests",
                                "Error from server (BadRequest): previous terminated container \"tempest-run-tests\" in pod \"tempest-run-tests-g5plh\" not found",
                                "openstack/valkey-node-0/valkey",
                                "Error from server (BadRequest): previous terminated container \"valkey\" in pod \"valkey-node-0\" not found",
                                "openstack/valkey-node-0/sentinel",
                                "Error from server (BadRequest): previous terminated container \"sentinel\" in pod \"valkey-node-0\" not found",
                                "openstack/valkey-node-0/metrics",
                                "Error from server (BadRequest): previous terminated container \"metrics\" in pod \"valkey-node-0\" not found",
                                "orc-system/orc-controller-manager-6cb597b5d4-glhcz/manager",
                                "Error from server (BadRequest): previous terminated container \"manager\" in pod \"orc-controller-manager-6cb597b5d4-glhcz\" not found",
                                "rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/rook-ceph-operator",
                                "Error from server (BadRequest): previous terminated container \"rook-ceph-operator\" in pod \"rook-ceph-operator-7b66cfb94c-tj94j\" not found",
                                "secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/secretgen-controller",
                                "Error from server (BadRequest): previous terminated container \"secretgen-controller\" in pod \"secretgen-controller-5cf976ccc7-szs5h\" not found"
                            ],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000023-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:10.975330Z",
                            "start": "2026-03-01T22:28:58.250319Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000023",
                        "name": "retrieve all kubernetes logs, current and previous (if they exist)"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/tmp/logs/pod-logs /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.156:/tmp/logs/pod-logs",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ pod-logs/\ncd+++++++++ pod-logs/auth-system/\ncd+++++++++ pod-logs/auth-system/keycloak-0/\n>f+++++++++ pod-logs/auth-system/keycloak-0/keycloak.txt\ncd+++++++++ pod-logs/capi-kubeadm-bootstrap-system/\ncd+++++++++ pod-logs/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/\n>f+++++++++ pod-logs/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/manager.txt\ncd+++++++++ pod-logs/capi-kubeadm-control-plane-system/\ncd+++++++++ pod-logs/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/\n>f+++++++++ pod-logs/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/manager.txt\ncd+++++++++ pod-logs/capi-system/\ncd+++++++++ pod-logs/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/\n>f+++++++++ pod-logs/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/manager.txt\ncd+++++++++ pod-logs/capo-system/\ncd+++++++++ pod-logs/capo-system/capo-controller-manager-6975759b4b-tkxrs/\n>f+++++++++ pod-logs/capo-system/capo-controller-manager-6975759b4b-tkxrs/manager.txt\ncd+++++++++ pod-logs/cert-manager/\ncd+++++++++ pod-logs/cert-manager/cert-manager-75c4c745bc-45s4r/\n>f+++++++++ pod-logs/cert-manager/cert-manager-75c4c745bc-45s4r/cert-manager-controller.txt\ncd+++++++++ pod-logs/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/\n>f+++++++++ pod-logs/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/cert-manager-cainjector.txt\ncd+++++++++ pod-logs/cert-manager/cert-manager-webhook-548949fc64-vkrlt/\n>f+++++++++ pod-logs/cert-manager/cert-manager-webhook-548949fc64-vkrlt/cert-manager-webhook.txt\ncd+++++++++ pod-logs/failed-pods/\ncd+++++++++ pod-logs/failed-pods/auth-system/\ncd+++++++++ pod-logs/failed-pods/auth-system/keycloak-0/\n>f+++++++++ pod-logs/failed-pods/auth-system/keycloak-0/keycloak.txt\ncd+++++++++ 
pod-logs/failed-pods/capi-kubeadm-bootstrap-system/\ncd+++++++++ pod-logs/failed-pods/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/\n>f+++++++++ pod-logs/failed-pods/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/manager.txt\ncd+++++++++ pod-logs/failed-pods/capi-kubeadm-control-plane-system/\ncd+++++++++ pod-logs/failed-pods/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/\n>f+++++++++ pod-logs/failed-pods/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/manager.txt\ncd+++++++++ pod-logs/failed-pods/capi-system/\ncd+++++++++ pod-logs/failed-pods/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/\n>f+++++++++ pod-logs/failed-pods/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/manager.txt\ncd+++++++++ pod-logs/failed-pods/capo-system/\ncd+++++++++ pod-logs/failed-pods/capo-system/capo-controller-manager-6975759b4b-tkxrs/\n>f+++++++++ pod-logs/failed-pods/capo-system/capo-controller-manager-6975759b4b-tkxrs/manager.txt\ncd+++++++++ pod-logs/failed-pods/cert-manager/\ncd+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-75c4c745bc-45s4r/\n>f+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-75c4c745bc-45s4r/cert-manager-controller.txt\ncd+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/\n>f+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/cert-manager-cainjector.txt\ncd+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-webhook-548949fc64-vkrlt/\n>f+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-webhook-548949fc64-vkrlt/cert-manager-webhook.txt\ncd+++++++++ pod-logs/failed-pods/ingress-nginx/\ncd+++++++++ pod-logs/failed-pods/ingress-nginx/ingress-nginx-controller-j4bqv/\n>f+++++++++ pod-logs/failed-pods/ingress-nginx/ingress-nginx-controller-j4bqv/controller.txt\ncd+++++++++ 
pod-logs/failed-pods/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/\n>f+++++++++ pod-logs/failed-pods/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/ingress-nginx-default-backend.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/\ncd+++++++++ pod-logs/failed-pods/kube-system/cilium-operator-869df985b8-kszk2/\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-operator-869df985b8-kszk2/cilium-operator.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/apply-sysctl-overwrites.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/cilium-agent.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/clean-cilium-state.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/config.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/install-cni-binaries.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/mount-bpf-fs.txt\n>f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/mount-cgroup.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-6f2mm/\n>f+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-6f2mm/coredns.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-j6fp4/\n>f+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-j6fp4/coredns.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/etcd.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/kube-apiserver.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/kube-controller-manager.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-sp2vs/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-sp2vs/kube-proxy.txt\ncd+++++++++ 
pod-logs/failed-pods/kube-system/kube-scheduler-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-scheduler-instance/kube-scheduler.txt\ncd+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/\n>f+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/kube-vip.txt\ncd+++++++++ pod-logs/failed-pods/local-path-storage/\ncd+++++++++ pod-logs/failed-pods/local-path-storage/local-path-provisioner-679c578f5-7h8w5/\n>f+++++++++ pod-logs/failed-pods/local-path-storage/local-path-provisioner-679c578f5-7h8w5/local-path-provisioner.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/\ncd+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/\n>f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/alertmanager.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/config-reloader.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/init-config-reloader.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/oauth2-proxy.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/goldpinger-7jzp8/\n>f+++++++++ pod-logs/failed-pods/monitoring/goldpinger-7jzp8/goldpinger-daemon.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/\n>f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-dashboard.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-datasources.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/\n>f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/kube-state-metrics.txt\ncd+++++++++ 
pod-logs/failed-pods/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/\n>f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/kube-prometheus-stack.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/\n>f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/node-exporter.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/pod-tls-sidecar.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/loki-0/\n>f+++++++++ pod-logs/failed-pods/monitoring/loki-0/loki.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/loki-chunks-cache-0/\n>f+++++++++ pod-logs/failed-pods/monitoring/loki-chunks-cache-0/exporter.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/loki-chunks-cache-0/memcached.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/loki-gateway-cf54cb88c-zv654/\n>f+++++++++ pod-logs/failed-pods/monitoring/loki-gateway-cf54cb88c-zv654/nginx.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/loki-results-cache-0/\n>f+++++++++ pod-logs/failed-pods/monitoring/loki-results-cache-0/exporter.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/loki-results-cache-0/memcached.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/\n>f+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/gc.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/\n>f+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/master.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-worker-p8lmk/\n>f+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-worker-p8lmk/worker.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/\n>f+++++++++ 
pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/config-reloader.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/init-config-reloader.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/oauth2-proxy.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/pod-tls-sidecar.txt\n>f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/prometheus.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/prometheus-pushgateway-7b8659c68b-28dht/\n>f+++++++++ pod-logs/failed-pods/monitoring/prometheus-pushgateway-7b8659c68b-28dht/pushgateway.txt\ncd+++++++++ pod-logs/failed-pods/monitoring/vector-qzjms/\n>f+++++++++ pod-logs/failed-pods/monitoring/vector-qzjms/vector.txt\ncd+++++++++ pod-logs/failed-pods/openstack/\ncd+++++++++ pod-logs/failed-pods/openstack/barbican-api-775987496d-z6jqv/\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-api-775987496d-z6jqv/barbican-api.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-api-775987496d-z6jqv/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/barbican-db-init-nm8k6/\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-db-init-nm8k6/barbican-db-init-0.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-db-init-nm8k6/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/barbican-db-sync-452x5/\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-db-sync-452x5/barbican-db-sync.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-db-sync-452x5/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-internal.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-public.txt\ncd+++++++++ pod-logs/failed-pods/openstack/barbican-ks-service-8pm7j/\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-service-8pm7j/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-service-8pm7j/key-manager-ks-service-registration.txt\ncd+++++++++ pod-logs/failed-pods/openstack/barbican-ks-user-fszfr/\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-user-fszfr/barbican-ks-user.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-user-fszfr/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/barbican-rabbit-init-j5qmd/\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-rabbit-init-j5qmd/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/barbican-rabbit-init-j5qmd/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/cinder-api.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-backup-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-keyring-placement-rbd1.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/cinder-backup.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/ceph-keyring-placement.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/cinder-backup-storage-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-bootstrap-wng86/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-bootstrap-wng86/bootstrap.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-bootstrap-wng86/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-create-internal-tenant-6vgll/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-create-internal-tenant-6vgll/create-internal-tenant.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-create-internal-tenant-6vgll/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-db-init-mzm5b/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-db-init-mzm5b/cinder-db-init-0.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-db-init-mzm5b/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-db-sync-mz6ls/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-db-sync-mz6ls/cinder-db-sync.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-db-sync-mz6ls/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-public.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-ks-service-dlcxz/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-service-dlcxz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-service-dlcxz/volumev3-ks-service-registration.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-ks-user-5bd5g/\n>f+++++++++ 
pod-logs/failed-pods/openstack/cinder-ks-user-5bd5g/cinder-ks-user.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-user-5bd5g/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-rabbit-init-l4fpm/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-rabbit-init-l4fpm/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-rabbit-init-l4fpm/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/cinder-scheduler.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/cinder-storage-init-rbd1.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/ceph-keyring-placement-rbd1.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/cinder-volume.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/init-cinder-conf.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/cinder-volume-usage-audit-29540045-jbmvh/\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-usage-audit-29540045-jbmvh/cinder-volume-usage-audit.txt\n>f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-usage-audit-29540045-jbmvh/init.txt\ncd+++++++++ 
pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/glance-api.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/glance-perms.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-db-init-wbpff/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-db-init-wbpff/glance-db-init-0.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-db-init-wbpff/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-db-sync-gk84f/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-db-sync-gk84f/glance-db-sync.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-db-sync-gk84f/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-public.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-ks-service-5h6bw/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-service-5h6bw/image-ks-service-registration.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-service-5h6bw/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-ks-user-lcfxr/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-user-lcfxr/glance-ks-user.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-ks-user-lcfxr/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-metadefs-load-476tp/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-metadefs-load-476tp/glance-metadefs-load.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/glance-metadefs-load-476tp/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-rabbit-init-c6rjt/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-rabbit-init-c6rjt/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-rabbit-init-c6rjt/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/\n>f+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/glance-storage-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-api-6d65f9477-kmbkl/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-api-6d65f9477-kmbkl/heat-api.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-api-6d65f9477-kmbkl/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-bootstrap-9dwg2/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-bootstrap-9dwg2/bootstrap.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-bootstrap-9dwg2/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-cfn-f44db7787-t8f7m/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-cfn-f44db7787-t8f7m/heat-cfn.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-cfn-f44db7787-t8f7m/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-db-init-fk8qw/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-db-init-fk8qw/heat-db-init-0.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-db-init-fk8qw/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-db-sync-cxmcb/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-db-sync-cxmcb/heat-db-sync.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-db-sync-cxmcb/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-domain-ks-user-tq2c5/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-domain-ks-user-tq2c5/heat-ks-domain-user.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/heat-domain-ks-user-tq2c5/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-engine-64f8b77bfb-wngkr/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-64f8b77bfb-wngkr/heat-engine.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-64f8b77bfb-wngkr/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540055-p9pq9/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540055-p9pq9/heat-engine-cleaner.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540055-p9pq9/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540060-z4g95/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540060-z4g95/heat-engine-cleaner.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540060-z4g95/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540065-rcjr2/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540065-rcjr2/heat-engine-cleaner.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540065-rcjr2/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-public.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-public.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/\n>f+++++++++ 
pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/cloudformation-ks-service-registration.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/orchestration-ks-service-registration.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/heat-ks-user.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/heat-trustee-ks-user.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-rabbit-init-rbl9n/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-rabbit-init-rbl9n/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-rabbit-init-rbl9n/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/heat-trusts-czrrv/\n>f+++++++++ pod-logs/failed-pods/openstack/heat-trusts-czrrv/heat-trusts.txt\n>f+++++++++ pod-logs/failed-pods/openstack/heat-trusts-czrrv/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/horizon-8cdd7b888-bvzvx/\n>f+++++++++ pod-logs/failed-pods/openstack/horizon-8cdd7b888-bvzvx/horizon.txt\n>f+++++++++ pod-logs/failed-pods/openstack/horizon-8cdd7b888-bvzvx/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/horizon-db-init-s5pbw/\n>f+++++++++ pod-logs/failed-pods/openstack/horizon-db-init-s5pbw/horizon-db-init-0.txt\n>f+++++++++ pod-logs/failed-pods/openstack/horizon-db-init-s5pbw/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/horizon-db-sync-bgr2g/\n>f+++++++++ pod-logs/failed-pods/openstack/horizon-db-sync-bgr2g/horizon-db-sync.txt\n>f+++++++++ pod-logs/failed-pods/openstack/horizon-db-sync-bgr2g/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keepalived-7jdfz/\n>f+++++++++ pod-logs/failed-pods/openstack/keepalived-7jdfz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keepalived-7jdfz/keepalived.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/keepalived-7jdfz/wait-for-ip.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-api-c4656754c-mqbxm/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-api-c4656754c-mqbxm/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-api-c4656754c-mqbxm/keystone-api.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-bootstrap-mdtrx/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-bootstrap-mdtrx/bootstrap.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-bootstrap-mdtrx/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-credential-setup-6xsvx/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-credential-setup-6xsvx/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-credential-setup-6xsvx/keystone-credential-setup.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-db-init-z5mwz/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-db-init-z5mwz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-db-init-z5mwz/keystone-db-init-0.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-db-sync-zsq8z/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-db-sync-zsq8z/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-db-sync-zsq8z/keystone-db-sync.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-domain-manage-v865d/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-domain-manage-v865d/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-domain-manage-v865d/keystone-domain-manage.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-fernet-setup-5rfqs/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-fernet-setup-5rfqs/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-fernet-setup-5rfqs/keystone-fernet-setup.txt\ncd+++++++++ pod-logs/failed-pods/openstack/keystone-rabbit-init-m44qz/\n>f+++++++++ pod-logs/failed-pods/openstack/keystone-rabbit-init-m44qz/init.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/keystone-rabbit-init-m44qz/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/\n>f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/ceph-admin-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/init-dynamic-options.txt\n>f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/libvirt-exporter.txt\n>f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/libvirt.txt\n>f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/tls-sidecar.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-api-8549df7884-9b2zc/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-api-8549df7884-9b2zc/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-api-8549df7884-9b2zc/magnum-api.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-cluster-api-proxy-z2flh/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-cluster-api-proxy-z2flh/magnum-cluster-api-proxy.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/magnum-conductor-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/magnum-conductor.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-db-init-dshrc/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-db-init-dshrc/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-db-init-dshrc/magnum-db-init-0.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-db-sync-8ttpk/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-db-sync-8ttpk/init.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/magnum-db-sync-8ttpk/magnum-db-sync.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-domain-ks-user-vp8f2/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-domain-ks-user-vp8f2/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-domain-ks-user-vp8f2/magnum-ks-domain-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-public.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-ks-service-vdn67/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-service-vdn67/container-infra-ks-service-registration.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-service-vdn67/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-ks-user-4wvtj/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-user-4wvtj/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-user-4wvtj/magnum-ks-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-rabbit-init-w7jc7/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-rabbit-init-w7jc7/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-rabbit-init-w7jc7/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/magnum-registry-c45778976-2zz96/\n>f+++++++++ pod-logs/failed-pods/openstack/magnum-registry-c45778976-2zz96/registry.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-api-5cdf958bd9-hmbmb/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-api-5cdf958bd9-hmbmb/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-api-5cdf958bd9-hmbmb/manila-api.txt\ncd+++++++++ 
pod-logs/failed-pods/openstack/manila-bootstrap-5wn97/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-bootstrap-5wn97/bootstrap.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-bootstrap-5wn97/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-data-75cbc955bd-27jjw/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-data-75cbc955bd-27jjw/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-data-75cbc955bd-27jjw/manila-data.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-db-init-pbdm8/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-db-init-pbdm8/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-db-init-pbdm8/manila-db-init-0.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-db-sync-rm9mz/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-db-sync-rm9mz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-db-sync-rm9mz/manila-db-sync.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-public.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-public.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-ks-service-g7svt/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-service-g7svt/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-service-g7svt/share-ks-service-registration.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/manila-ks-service-g7svt/sharev2-ks-service-registration.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-ks-user-pr9mg/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-user-pr9mg/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-ks-user-pr9mg/manila-ks-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-rabbit-init-74vjs/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-rabbit-init-74vjs/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-rabbit-init-74vjs/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-scheduler-5b584c8656-mmnnd/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-scheduler-5b584c8656-mmnnd/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-scheduler-5b584c8656-mmnnd/manila-scheduler.txt\ncd+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/\n>f+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/manila-share-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/manila-share.txt\ncd+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/\n>f+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/memcached-exporter.txt\n>f+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/memcached.txt\ncd+++++++++ pod-logs/failed-pods/openstack/neutron-db-init-l7c9v/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-db-init-l7c9v/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-db-init-l7c9v/neutron-db-init-0.txt\ncd+++++++++ pod-logs/failed-pods/openstack/neutron-db-sync-brwb5/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-db-sync-brwb5/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-db-sync-brwb5/neutron-db-sync.txt\ncd+++++++++ 
pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-public.txt\ncd+++++++++ pod-logs/failed-pods/openstack/neutron-ks-service-sq4tp/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-service-sq4tp/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-service-sq4tp/network-ks-service-registration.txt\ncd+++++++++ pod-logs/failed-pods/openstack/neutron-ks-user-kcfc4/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-user-kcfc4/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-user-kcfc4/neutron-ks-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/neutron-netns-cleanup-cron-default-8frwf/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-netns-cleanup-cron-default-8frwf/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-netns-cleanup-cron-default-8frwf/neutron-netns-cleanup-cron.txt\ncd+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-metadata-agent-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-ovn-metadata-agent.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/ovn-neutron-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/neutron-rabbit-init-rdnbf/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-rabbit-init-rdnbf/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-rabbit-init-rdnbf/rabbit-init.txt\ncd+++++++++ 
pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/neutron-policy-server.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/neutron-server.txt\n>f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/ovn-neutron-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api-metadata-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-api-osapi-99c7b7cd8-2lnzr/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-api-osapi-99c7b7cd8-2lnzr/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-api-osapi-99c7b7cd8-2lnzr/nova-osapi.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-bootstrap-trzqq/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-bootstrap-trzqq/bootstrap.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-bootstrap-trzqq/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-29540040-rtzd7/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-29540040-rtzd7/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-29540040-rtzd7/nova-cell-setup.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/nova-cell-setup-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/nova-cell-setup.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/\n>f+++++++++ 
pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/ceph-admin-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/ceph-perms.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-ssh-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-ssh.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-vnc-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-conductor-5474cb4b8d-bxzhq/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-conductor-5474cb4b8d-bxzhq/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-conductor-5474cb4b8d-bxzhq/nova-conductor.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/nova-db-init-0.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/nova-db-init-1.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/nova-db-init-2.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-db-sync-2rbjc/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-db-sync-2rbjc/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-db-sync-2rbjc/nova-db-sync.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-internal.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-public.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-ks-service-fmj77/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-ks-service-fmj77/compute-ks-service-registration.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-ks-service-fmj77/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-ks-user-t8xgz/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-ks-user-t8xgz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-ks-user-t8xgz/nova-ks-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init-assets.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-rabbit-init-szpvx/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-rabbit-init-szpvx/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-rabbit-init-szpvx/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-scheduler-78775555d4-hb2j9/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-scheduler-78775555d4-hb2j9/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-scheduler-78775555d4-hb2j9/nova-scheduler.txt\ncd+++++++++ pod-logs/failed-pods/openstack/nova-service-cleaner-29540040-cxdd4/\n>f+++++++++ pod-logs/failed-pods/openstack/nova-service-cleaner-29540040-cxdd4/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/nova-service-cleaner-29540040-cxdd4/nova-service-cleaner.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-api-75db6578cf-m656r/\n>f+++++++++ 
pod-logs/failed-pods/openstack/octavia-api-75db6578cf-m656r/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-api-75db6578cf-m656r/octavia-api.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-bootstrap-kwfv2/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-bootstrap-kwfv2/bootstrap.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-bootstrap-kwfv2/init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/octavia-db-init-0.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/octavia-db-init-1.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-db-sync-rjq45/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-db-sync-rjq45/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-db-sync-rjq45/octavia-db-sync.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/octavia-health-manager-get-port.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/octavia-health-manager-nic-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/octavia-health-manager.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-housekeeping-87b98c47b-vqwct/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-housekeeping-87b98c47b-vqwct/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-housekeeping-87b98c47b-vqwct/octavia-housekeeping.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-admin.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-public.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-ks-service-rkdp9/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-service-rkdp9/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-service-rkdp9/load-balancer-ks-service-registration.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-ks-user-tjl52/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-user-tjl52/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-user-tjl52/octavia-ks-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-rabbit-init-vdqxf/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-rabbit-init-vdqxf/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-rabbit-init-vdqxf/rabbit-init.txt\ncd+++++++++ pod-logs/failed-pods/openstack/octavia-worker-774cddbcdc-qxl6k/\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-worker-774cddbcdc-qxl6k/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/octavia-worker-774cddbcdc-qxl6k/octavia-worker.txt\ncd+++++++++ pod-logs/failed-pods/openstack/openstack-database-exporter-7c944bc9f-w2bdb/\n>f+++++++++ pod-logs/failed-pods/openstack/openstack-database-exporter-7c944bc9f-w2bdb/openstack-database-exporter.txt\ncd+++++++++ pod-logs/failed-pods/openstack/openstack-exporter-74676fb4b4-jrkwh/\n>f+++++++++ pod-logs/failed-pods/openstack/openstack-exporter-74676fb4b4-jrkwh/build-config.txt\n>f+++++++++ pod-logs/failed-pods/openstack/openstack-exporter-74676fb4b4-jrkwh/openstack-exporter.txt\ncd+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/\n>f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-db-perms.txt\n>f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-db.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-vswitchd-modules.txt\n>f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-vswitchd.txt\ncd+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/controller-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/controller.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/get-gw-enabled.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/log-parser.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/vector.txt\ncd+++++++++ pod-logs/failed-pods/openstack/ovn-northd-6c6687ddd6-7grhs/\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-northd-6c6687ddd6-7grhs/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-northd-6c6687ddd6-7grhs/northd.txt\ncd+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-nb-0/\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-nb-0/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-nb-0/ovsdb.txt\ncd+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-sb-0/\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-sb-0/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-sb-0/ovsdb.txt\ncd+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/\n>f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/haproxy-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/haproxy.txt\n>f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/pxc-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/pxc-monit.txt\ncd+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/\n>f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/exporter.txt\n>f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/pxc-init.txt\n>f+++++++++ 
pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/pxc.txt\ncd+++++++++ pod-logs/failed-pods/openstack/placement-api-75695696c6-brsxj/\n>f+++++++++ pod-logs/failed-pods/openstack/placement-api-75695696c6-brsxj/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-api-75695696c6-brsxj/placement-api.txt\ncd+++++++++ pod-logs/failed-pods/openstack/placement-db-init-89t92/\n>f+++++++++ pod-logs/failed-pods/openstack/placement-db-init-89t92/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-db-init-89t92/placement-db-init-0.txt\ncd+++++++++ pod-logs/failed-pods/openstack/placement-db-sync-nvqjv/\n>f+++++++++ pod-logs/failed-pods/openstack/placement-db-sync-nvqjv/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-db-sync-nvqjv/placement-db-sync.txt\ncd+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-public.txt\ncd+++++++++ pod-logs/failed-pods/openstack/placement-ks-service-qdjdz/\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-service-qdjdz/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-service-qdjdz/placement-ks-service-registration.txt\ncd+++++++++ pod-logs/failed-pods/openstack/placement-ks-user-blkn9/\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-user-blkn9/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/placement-ks-user-blkn9/placement-ks-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/pxc-operator-7cff949c8b-7zp4j/\n>f+++++++++ pod-logs/failed-pods/openstack/pxc-operator-7cff949c8b-7zp4j/percona-xtradb-cluster-operator.txt\ncd+++++++++ 
pod-logs/failed-pods/openstack/rabbitmq-barbican-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-barbican-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-barbican-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cinder-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cinder-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cinder-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/rabbitmq-cluster-operator.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-glance-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-glance-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-glance-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-heat-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-heat-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-heat-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-keystone-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-keystone-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-keystone-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-magnum-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-magnum-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-magnum-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-manila-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-manila-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-manila-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/\n>f+++++++++ 
pod-logs/failed-pods/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/rabbitmq-cluster-operator.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-neutron-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-neutron-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-neutron-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-nova-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-nova-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-nova-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-octavia-server-0/\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-octavia-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-octavia-server-0/setup-container.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/\n>f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/ceph-crash.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/chown-container-data-dir.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/make-container-crash-dir.txt\ncd+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/\n>f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/chown-container-data-dir.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/log-collector.txt\n>f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/rgw.txt\ncd+++++++++ pod-logs/failed-pods/openstack/staffeln-api-6669c8779f-qgp4c/\n>f+++++++++ pod-logs/failed-pods/openstack/staffeln-api-6669c8779f-qgp4c/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/staffeln-api-6669c8779f-qgp4c/staffeln-api.txt\ncd+++++++++ 
pod-logs/failed-pods/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/\n>f+++++++++ pod-logs/failed-pods/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/staffeln-conductor.txt\ncd+++++++++ pod-logs/failed-pods/openstack/staffeln-db-init-p4pq4/\n>f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-init-p4pq4/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-init-p4pq4/staffeln-db-init-0.txt\ncd+++++++++ pod-logs/failed-pods/openstack/staffeln-db-sync-khzx8/\n>f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-sync-khzx8/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-sync-khzx8/staffeln-db-sync.txt\ncd+++++++++ pod-logs/failed-pods/openstack/tempest-ks-user-kwbf6/\n>f+++++++++ pod-logs/failed-pods/openstack/tempest-ks-user-kwbf6/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/tempest-ks-user-kwbf6/tempest-ks-user.txt\ncd+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/\n>f+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/tempest-run-tests-init.txt\n>f+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/tempest-run-tests.txt\ncd+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/\n>f+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/metrics.txt\n>f+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/sentinel.txt\n>f+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/valkey.txt\ncd+++++++++ pod-logs/failed-pods/orc-system/\ncd+++++++++ pod-logs/failed-pods/orc-system/orc-controller-manager-6cb597b5d4-glhcz/\n>f+++++++++ pod-logs/failed-pods/orc-system/orc-controller-manager-6cb597b5d4-glhcz/manager.txt\ncd+++++++++ pod-logs/failed-pods/rook-ceph/\ncd+++++++++ pod-logs/failed-pods/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/\n>f+++++++++ pod-logs/failed-pods/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/rook-ceph-operator.txt\ncd+++++++++ pod-logs/failed-pods/secretgen-controller/\ncd+++++++++ 
pod-logs/failed-pods/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/\n>f+++++++++ pod-logs/failed-pods/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/secretgen-controller.txt\ncd+++++++++ pod-logs/ingress-nginx/\ncd+++++++++ pod-logs/ingress-nginx/ingress-nginx-controller-j4bqv/\n>f+++++++++ pod-logs/ingress-nginx/ingress-nginx-controller-j4bqv/controller.txt\ncd+++++++++ pod-logs/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/\n>f+++++++++ pod-logs/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/ingress-nginx-default-backend.txt\ncd+++++++++ pod-logs/kube-system/\ncd+++++++++ pod-logs/kube-system/cilium-operator-869df985b8-kszk2/\n>f+++++++++ pod-logs/kube-system/cilium-operator-869df985b8-kszk2/cilium-operator.txt\ncd+++++++++ pod-logs/kube-system/cilium-vdz4f/\n>f+++++++++ pod-logs/kube-system/cilium-vdz4f/apply-sysctl-overwrites.txt\n>f+++++++++ pod-logs/kube-system/cilium-vdz4f/cilium-agent.txt\n>f+++++++++ pod-logs/kube-system/cilium-vdz4f/clean-cilium-state.txt\n>f+++++++++ pod-logs/kube-system/cilium-vdz4f/config.txt\n>f+++++++++ pod-logs/kube-system/cilium-vdz4f/install-cni-binaries.txt\n>f+++++++++ pod-logs/kube-system/cilium-vdz4f/mount-bpf-fs.txt\n>f+++++++++ pod-logs/kube-system/cilium-vdz4f/mount-cgroup.txt\ncd+++++++++ pod-logs/kube-system/coredns-67659f764b-6f2mm/\n>f+++++++++ pod-logs/kube-system/coredns-67659f764b-6f2mm/coredns.txt\ncd+++++++++ pod-logs/kube-system/coredns-67659f764b-j6fp4/\n>f+++++++++ pod-logs/kube-system/coredns-67659f764b-j6fp4/coredns.txt\ncd+++++++++ pod-logs/kube-system/etcd-instance/\n>f+++++++++ pod-logs/kube-system/etcd-instance/etcd.txt\ncd+++++++++ pod-logs/kube-system/kube-apiserver-instance/\n>f+++++++++ pod-logs/kube-system/kube-apiserver-instance/kube-apiserver.txt\ncd+++++++++ pod-logs/kube-system/kube-controller-manager-instance/\n>f+++++++++ pod-logs/kube-system/kube-controller-manager-instance/kube-controller-manager.txt\ncd+++++++++ 
pod-logs/kube-system/kube-proxy-sp2vs/\n>f+++++++++ pod-logs/kube-system/kube-proxy-sp2vs/kube-proxy.txt\ncd+++++++++ pod-logs/kube-system/kube-scheduler-instance/\n>f+++++++++ pod-logs/kube-system/kube-scheduler-instance/kube-scheduler.txt\ncd+++++++++ pod-logs/kube-system/kube-vip-instance/\n>f+++++++++ pod-logs/kube-system/kube-vip-instance/kube-vip.txt\ncd+++++++++ pod-logs/local-path-storage/\ncd+++++++++ pod-logs/local-path-storage/local-path-provisioner-679c578f5-7h8w5/\n>f+++++++++ pod-logs/local-path-storage/local-path-provisioner-679c578f5-7h8w5/local-path-provisioner.txt\ncd+++++++++ pod-logs/monitoring/\ncd+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/\n>f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/alertmanager.txt\n>f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/config-reloader.txt\n>f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/init-config-reloader.txt\n>f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/oauth2-proxy.txt\ncd+++++++++ pod-logs/monitoring/goldpinger-7jzp8/\n>f+++++++++ pod-logs/monitoring/goldpinger-7jzp8/goldpinger-daemon.txt\ncd+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/\n>f+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-dashboard.txt\n>f+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-datasources.txt\n>f+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana.txt\ncd+++++++++ pod-logs/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/\n>f+++++++++ pod-logs/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/kube-state-metrics.txt\ncd+++++++++ pod-logs/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/\n>f+++++++++ 
pod-logs/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/kube-prometheus-stack.txt\ncd+++++++++ pod-logs/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/\n>f+++++++++ pod-logs/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/node-exporter.txt\n>f+++++++++ pod-logs/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/pod-tls-sidecar.txt\ncd+++++++++ pod-logs/monitoring/loki-0/\n>f+++++++++ pod-logs/monitoring/loki-0/loki.txt\ncd+++++++++ pod-logs/monitoring/loki-chunks-cache-0/\n>f+++++++++ pod-logs/monitoring/loki-chunks-cache-0/exporter.txt\n>f+++++++++ pod-logs/monitoring/loki-chunks-cache-0/memcached.txt\ncd+++++++++ pod-logs/monitoring/loki-gateway-cf54cb88c-zv654/\n>f+++++++++ pod-logs/monitoring/loki-gateway-cf54cb88c-zv654/nginx.txt\ncd+++++++++ pod-logs/monitoring/loki-results-cache-0/\n>f+++++++++ pod-logs/monitoring/loki-results-cache-0/exporter.txt\n>f+++++++++ pod-logs/monitoring/loki-results-cache-0/memcached.txt\ncd+++++++++ pod-logs/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/\n>f+++++++++ pod-logs/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/gc.txt\ncd+++++++++ pod-logs/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/\n>f+++++++++ pod-logs/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/master.txt\ncd+++++++++ pod-logs/monitoring/node-feature-discovery-worker-p8lmk/\n>f+++++++++ pod-logs/monitoring/node-feature-discovery-worker-p8lmk/worker.txt\ncd+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/\n>f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/config-reloader.txt\n>f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/init-config-reloader.txt\n>f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/oauth2-proxy.txt\n>f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/pod-tls-sidecar.txt\n>f+++++++++ 
pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/prometheus.txt\ncd+++++++++ pod-logs/monitoring/prometheus-pushgateway-7b8659c68b-28dht/\n>f+++++++++ pod-logs/monitoring/prometheus-pushgateway-7b8659c68b-28dht/pushgateway.txt\ncd+++++++++ pod-logs/monitoring/vector-qzjms/\n>f+++++++++ pod-logs/monitoring/vector-qzjms/vector.txt\ncd+++++++++ pod-logs/openstack/\ncd+++++++++ pod-logs/openstack/barbican-api-775987496d-z6jqv/\n>f+++++++++ pod-logs/openstack/barbican-api-775987496d-z6jqv/barbican-api.txt\n>f+++++++++ pod-logs/openstack/barbican-api-775987496d-z6jqv/init.txt\ncd+++++++++ pod-logs/openstack/barbican-db-init-nm8k6/\n>f+++++++++ pod-logs/openstack/barbican-db-init-nm8k6/barbican-db-init-0.txt\n>f+++++++++ pod-logs/openstack/barbican-db-init-nm8k6/init.txt\ncd+++++++++ pod-logs/openstack/barbican-db-sync-452x5/\n>f+++++++++ pod-logs/openstack/barbican-db-sync-452x5/barbican-db-sync.txt\n>f+++++++++ pod-logs/openstack/barbican-db-sync-452x5/init.txt\ncd+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/\n>f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/init.txt\n>f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-public.txt\ncd+++++++++ pod-logs/openstack/barbican-ks-service-8pm7j/\n>f+++++++++ pod-logs/openstack/barbican-ks-service-8pm7j/init.txt\n>f+++++++++ pod-logs/openstack/barbican-ks-service-8pm7j/key-manager-ks-service-registration.txt\ncd+++++++++ pod-logs/openstack/barbican-ks-user-fszfr/\n>f+++++++++ pod-logs/openstack/barbican-ks-user-fszfr/barbican-ks-user.txt\n>f+++++++++ pod-logs/openstack/barbican-ks-user-fszfr/init.txt\ncd+++++++++ pod-logs/openstack/barbican-rabbit-init-j5qmd/\n>f+++++++++ pod-logs/openstack/barbican-rabbit-init-j5qmd/init.txt\n>f+++++++++ 
pod-logs/openstack/barbican-rabbit-init-j5qmd/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/\n>f+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/cinder-api.txt\n>f+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/init.txt\ncd+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/\n>f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-backup-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-keyring-placement-rbd1.txt\n>f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/cinder-backup.txt\n>f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/init.txt\ncd+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/\n>f+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/cinder-backup-storage-init.txt\n>f+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/init.txt\ncd+++++++++ pod-logs/openstack/cinder-bootstrap-wng86/\n>f+++++++++ pod-logs/openstack/cinder-bootstrap-wng86/bootstrap.txt\n>f+++++++++ pod-logs/openstack/cinder-bootstrap-wng86/init.txt\ncd+++++++++ pod-logs/openstack/cinder-create-internal-tenant-6vgll/\n>f+++++++++ pod-logs/openstack/cinder-create-internal-tenant-6vgll/create-internal-tenant.txt\n>f+++++++++ pod-logs/openstack/cinder-create-internal-tenant-6vgll/init.txt\ncd+++++++++ pod-logs/openstack/cinder-db-init-mzm5b/\n>f+++++++++ pod-logs/openstack/cinder-db-init-mzm5b/cinder-db-init-0.txt\n>f+++++++++ pod-logs/openstack/cinder-db-init-mzm5b/init.txt\ncd+++++++++ pod-logs/openstack/cinder-db-sync-mz6ls/\n>f+++++++++ pod-logs/openstack/cinder-db-sync-mz6ls/cinder-db-sync.txt\n>f+++++++++ 
pod-logs/openstack/cinder-db-sync-mz6ls/init.txt\ncd+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/\n>f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/init.txt\n>f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-public.txt\ncd+++++++++ pod-logs/openstack/cinder-ks-service-dlcxz/\n>f+++++++++ pod-logs/openstack/cinder-ks-service-dlcxz/init.txt\n>f+++++++++ pod-logs/openstack/cinder-ks-service-dlcxz/volumev3-ks-service-registration.txt\ncd+++++++++ pod-logs/openstack/cinder-ks-user-5bd5g/\n>f+++++++++ pod-logs/openstack/cinder-ks-user-5bd5g/cinder-ks-user.txt\n>f+++++++++ pod-logs/openstack/cinder-ks-user-5bd5g/init.txt\ncd+++++++++ pod-logs/openstack/cinder-rabbit-init-l4fpm/\n>f+++++++++ pod-logs/openstack/cinder-rabbit-init-l4fpm/init.txt\n>f+++++++++ pod-logs/openstack/cinder-rabbit-init-l4fpm/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/\n>f+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/cinder-scheduler.txt\n>f+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/init.txt\ncd+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/\n>f+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/cinder-storage-init-rbd1.txt\n>f+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/init.txt\ncd+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/\n>f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/ceph-coordination-volume-perms.txt\n>f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/ceph-keyring-placement-rbd1.txt\n>f+++++++++ 
pod-logs/openstack/cinder-volume-66dc847979-qgp4l/cinder-volume.txt\n>f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/init-cinder-conf.txt\n>f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/init.txt\ncd+++++++++ pod-logs/openstack/cinder-volume-usage-audit-29540045-jbmvh/\n>f+++++++++ pod-logs/openstack/cinder-volume-usage-audit-29540045-jbmvh/cinder-volume-usage-audit.txt\n>f+++++++++ pod-logs/openstack/cinder-volume-usage-audit-29540045-jbmvh/init.txt\ncd+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/\n>f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/glance-api.txt\n>f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/glance-perms.txt\n>f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/init.txt\ncd+++++++++ pod-logs/openstack/glance-db-init-wbpff/\n>f+++++++++ pod-logs/openstack/glance-db-init-wbpff/glance-db-init-0.txt\n>f+++++++++ pod-logs/openstack/glance-db-init-wbpff/init.txt\ncd+++++++++ pod-logs/openstack/glance-db-sync-gk84f/\n>f+++++++++ pod-logs/openstack/glance-db-sync-gk84f/glance-db-sync.txt\n>f+++++++++ pod-logs/openstack/glance-db-sync-gk84f/init.txt\ncd+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/\n>f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-public.txt\n>f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/init.txt\ncd+++++++++ pod-logs/openstack/glance-ks-service-5h6bw/\n>f+++++++++ pod-logs/openstack/glance-ks-service-5h6bw/image-ks-service-registration.txt\n>f+++++++++ pod-logs/openstack/glance-ks-service-5h6bw/init.txt\ncd+++++++++ pod-logs/openstack/glance-ks-user-lcfxr/\n>f+++++++++ pod-logs/openstack/glance-ks-user-lcfxr/glance-ks-user.txt\n>f+++++++++ 
pod-logs/openstack/glance-ks-user-lcfxr/init.txt\ncd+++++++++ pod-logs/openstack/glance-metadefs-load-476tp/\n>f+++++++++ pod-logs/openstack/glance-metadefs-load-476tp/glance-metadefs-load.txt\n>f+++++++++ pod-logs/openstack/glance-metadefs-load-476tp/init.txt\ncd+++++++++ pod-logs/openstack/glance-rabbit-init-c6rjt/\n>f+++++++++ pod-logs/openstack/glance-rabbit-init-c6rjt/init.txt\n>f+++++++++ pod-logs/openstack/glance-rabbit-init-c6rjt/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/\n>f+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/glance-storage-init.txt\n>f+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/init.txt\ncd+++++++++ pod-logs/openstack/heat-api-6d65f9477-kmbkl/\n>f+++++++++ pod-logs/openstack/heat-api-6d65f9477-kmbkl/heat-api.txt\n>f+++++++++ pod-logs/openstack/heat-api-6d65f9477-kmbkl/init.txt\ncd+++++++++ pod-logs/openstack/heat-bootstrap-9dwg2/\n>f+++++++++ pod-logs/openstack/heat-bootstrap-9dwg2/bootstrap.txt\n>f+++++++++ pod-logs/openstack/heat-bootstrap-9dwg2/init.txt\ncd+++++++++ pod-logs/openstack/heat-cfn-f44db7787-t8f7m/\n>f+++++++++ pod-logs/openstack/heat-cfn-f44db7787-t8f7m/heat-cfn.txt\n>f+++++++++ pod-logs/openstack/heat-cfn-f44db7787-t8f7m/init.txt\ncd+++++++++ pod-logs/openstack/heat-db-init-fk8qw/\n>f+++++++++ pod-logs/openstack/heat-db-init-fk8qw/heat-db-init-0.txt\n>f+++++++++ pod-logs/openstack/heat-db-init-fk8qw/init.txt\ncd+++++++++ pod-logs/openstack/heat-db-sync-cxmcb/\n>f+++++++++ pod-logs/openstack/heat-db-sync-cxmcb/heat-db-sync.txt\n>f+++++++++ pod-logs/openstack/heat-db-sync-cxmcb/init.txt\ncd+++++++++ pod-logs/openstack/heat-domain-ks-user-tq2c5/\n>f+++++++++ pod-logs/openstack/heat-domain-ks-user-tq2c5/heat-ks-domain-user.txt\n>f+++++++++ pod-logs/openstack/heat-domain-ks-user-tq2c5/init.txt\ncd+++++++++ pod-logs/openstack/heat-engine-64f8b77bfb-wngkr/\n>f+++++++++ 
pod-logs/openstack/heat-engine-64f8b77bfb-wngkr/heat-engine.txt\n>f+++++++++ pod-logs/openstack/heat-engine-64f8b77bfb-wngkr/init.txt\ncd+++++++++ pod-logs/openstack/heat-engine-cleaner-29540055-p9pq9/\n>f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540055-p9pq9/heat-engine-cleaner.txt\n>f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540055-p9pq9/init.txt\ncd+++++++++ pod-logs/openstack/heat-engine-cleaner-29540060-z4g95/\n>f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540060-z4g95/heat-engine-cleaner.txt\n>f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540060-z4g95/init.txt\ncd+++++++++ pod-logs/openstack/heat-engine-cleaner-29540065-rcjr2/\n>f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540065-rcjr2/heat-engine-cleaner.txt\n>f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540065-rcjr2/init.txt\ncd+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/\n>f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-public.txt\n>f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/init.txt\n>f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-public.txt\ncd+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/\n>f+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/cloudformation-ks-service-registration.txt\n>f+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/init.txt\n>f+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/orchestration-ks-service-registration.txt\ncd+++++++++ pod-logs/openstack/heat-ks-user-tfk98/\n>f+++++++++ pod-logs/openstack/heat-ks-user-tfk98/heat-ks-user.txt\n>f+++++++++ 
pod-logs/openstack/heat-ks-user-tfk98/heat-trustee-ks-user.txt\n>f+++++++++ pod-logs/openstack/heat-ks-user-tfk98/init.txt\ncd+++++++++ pod-logs/openstack/heat-rabbit-init-rbl9n/\n>f+++++++++ pod-logs/openstack/heat-rabbit-init-rbl9n/init.txt\n>f+++++++++ pod-logs/openstack/heat-rabbit-init-rbl9n/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/heat-trusts-czrrv/\n>f+++++++++ pod-logs/openstack/heat-trusts-czrrv/heat-trusts.txt\n>f+++++++++ pod-logs/openstack/heat-trusts-czrrv/init.txt\ncd+++++++++ pod-logs/openstack/horizon-8cdd7b888-bvzvx/\n>f+++++++++ pod-logs/openstack/horizon-8cdd7b888-bvzvx/horizon.txt\n>f+++++++++ pod-logs/openstack/horizon-8cdd7b888-bvzvx/init.txt\ncd+++++++++ pod-logs/openstack/horizon-db-init-s5pbw/\n>f+++++++++ pod-logs/openstack/horizon-db-init-s5pbw/horizon-db-init-0.txt\n>f+++++++++ pod-logs/openstack/horizon-db-init-s5pbw/init.txt\ncd+++++++++ pod-logs/openstack/horizon-db-sync-bgr2g/\n>f+++++++++ pod-logs/openstack/horizon-db-sync-bgr2g/horizon-db-sync.txt\n>f+++++++++ pod-logs/openstack/horizon-db-sync-bgr2g/init.txt\ncd+++++++++ pod-logs/openstack/keepalived-7jdfz/\n>f+++++++++ pod-logs/openstack/keepalived-7jdfz/init.txt\n>f+++++++++ pod-logs/openstack/keepalived-7jdfz/keepalived.txt\n>f+++++++++ pod-logs/openstack/keepalived-7jdfz/wait-for-ip.txt\ncd+++++++++ pod-logs/openstack/keystone-api-c4656754c-mqbxm/\n>f+++++++++ pod-logs/openstack/keystone-api-c4656754c-mqbxm/init.txt\n>f+++++++++ pod-logs/openstack/keystone-api-c4656754c-mqbxm/keystone-api.txt\ncd+++++++++ pod-logs/openstack/keystone-bootstrap-mdtrx/\n>f+++++++++ pod-logs/openstack/keystone-bootstrap-mdtrx/bootstrap.txt\n>f+++++++++ pod-logs/openstack/keystone-bootstrap-mdtrx/init.txt\ncd+++++++++ pod-logs/openstack/keystone-credential-setup-6xsvx/\n>f+++++++++ pod-logs/openstack/keystone-credential-setup-6xsvx/init.txt\n>f+++++++++ pod-logs/openstack/keystone-credential-setup-6xsvx/keystone-credential-setup.txt\ncd+++++++++ 
pod-logs/openstack/keystone-db-init-z5mwz/\n>f+++++++++ pod-logs/openstack/keystone-db-init-z5mwz/init.txt\n>f+++++++++ pod-logs/openstack/keystone-db-init-z5mwz/keystone-db-init-0.txt\ncd+++++++++ pod-logs/openstack/keystone-db-sync-zsq8z/\n>f+++++++++ pod-logs/openstack/keystone-db-sync-zsq8z/init.txt\n>f+++++++++ pod-logs/openstack/keystone-db-sync-zsq8z/keystone-db-sync.txt\ncd+++++++++ pod-logs/openstack/keystone-domain-manage-v865d/\n>f+++++++++ pod-logs/openstack/keystone-domain-manage-v865d/init.txt\n>f+++++++++ pod-logs/openstack/keystone-domain-manage-v865d/keystone-domain-manage.txt\ncd+++++++++ pod-logs/openstack/keystone-fernet-setup-5rfqs/\n>f+++++++++ pod-logs/openstack/keystone-fernet-setup-5rfqs/init.txt\n>f+++++++++ pod-logs/openstack/keystone-fernet-setup-5rfqs/keystone-fernet-setup.txt\ncd+++++++++ pod-logs/openstack/keystone-rabbit-init-m44qz/\n>f+++++++++ pod-logs/openstack/keystone-rabbit-init-m44qz/init.txt\n>f+++++++++ pod-logs/openstack/keystone-rabbit-init-m44qz/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/\n>f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/ceph-admin-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/init-dynamic-options.txt\n>f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/init.txt\n>f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/libvirt-exporter.txt\n>f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/libvirt.txt\n>f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/tls-sidecar.txt\ncd+++++++++ pod-logs/openstack/magnum-api-8549df7884-9b2zc/\n>f+++++++++ pod-logs/openstack/magnum-api-8549df7884-9b2zc/init.txt\n>f+++++++++ pod-logs/openstack/magnum-api-8549df7884-9b2zc/magnum-api.txt\ncd+++++++++ pod-logs/openstack/magnum-cluster-api-proxy-z2flh/\n>f+++++++++ 
pod-logs/openstack/magnum-cluster-api-proxy-z2flh/magnum-cluster-api-proxy.txt\ncd+++++++++ pod-logs/openstack/magnum-conductor-0/\n>f+++++++++ pod-logs/openstack/magnum-conductor-0/init.txt\n>f+++++++++ pod-logs/openstack/magnum-conductor-0/magnum-conductor-init.txt\n>f+++++++++ pod-logs/openstack/magnum-conductor-0/magnum-conductor.txt\ncd+++++++++ pod-logs/openstack/magnum-db-init-dshrc/\n>f+++++++++ pod-logs/openstack/magnum-db-init-dshrc/init.txt\n>f+++++++++ pod-logs/openstack/magnum-db-init-dshrc/magnum-db-init-0.txt\ncd+++++++++ pod-logs/openstack/magnum-db-sync-8ttpk/\n>f+++++++++ pod-logs/openstack/magnum-db-sync-8ttpk/init.txt\n>f+++++++++ pod-logs/openstack/magnum-db-sync-8ttpk/magnum-db-sync.txt\ncd+++++++++ pod-logs/openstack/magnum-domain-ks-user-vp8f2/\n>f+++++++++ pod-logs/openstack/magnum-domain-ks-user-vp8f2/init.txt\n>f+++++++++ pod-logs/openstack/magnum-domain-ks-user-vp8f2/magnum-ks-domain-user.txt\ncd+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/\n>f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-public.txt\n>f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/init.txt\ncd+++++++++ pod-logs/openstack/magnum-ks-service-vdn67/\n>f+++++++++ pod-logs/openstack/magnum-ks-service-vdn67/container-infra-ks-service-registration.txt\n>f+++++++++ pod-logs/openstack/magnum-ks-service-vdn67/init.txt\ncd+++++++++ pod-logs/openstack/magnum-ks-user-4wvtj/\n>f+++++++++ pod-logs/openstack/magnum-ks-user-4wvtj/init.txt\n>f+++++++++ pod-logs/openstack/magnum-ks-user-4wvtj/magnum-ks-user.txt\ncd+++++++++ pod-logs/openstack/magnum-rabbit-init-w7jc7/\n>f+++++++++ pod-logs/openstack/magnum-rabbit-init-w7jc7/init.txt\n>f+++++++++ pod-logs/openstack/magnum-rabbit-init-w7jc7/rabbit-init.txt\ncd+++++++++ 
pod-logs/openstack/magnum-registry-c45778976-2zz96/\n>f+++++++++ pod-logs/openstack/magnum-registry-c45778976-2zz96/registry.txt\ncd+++++++++ pod-logs/openstack/manila-api-5cdf958bd9-hmbmb/\n>f+++++++++ pod-logs/openstack/manila-api-5cdf958bd9-hmbmb/init.txt\n>f+++++++++ pod-logs/openstack/manila-api-5cdf958bd9-hmbmb/manila-api.txt\ncd+++++++++ pod-logs/openstack/manila-bootstrap-5wn97/\n>f+++++++++ pod-logs/openstack/manila-bootstrap-5wn97/bootstrap.txt\n>f+++++++++ pod-logs/openstack/manila-bootstrap-5wn97/init.txt\ncd+++++++++ pod-logs/openstack/manila-data-75cbc955bd-27jjw/\n>f+++++++++ pod-logs/openstack/manila-data-75cbc955bd-27jjw/init.txt\n>f+++++++++ pod-logs/openstack/manila-data-75cbc955bd-27jjw/manila-data.txt\ncd+++++++++ pod-logs/openstack/manila-db-init-pbdm8/\n>f+++++++++ pod-logs/openstack/manila-db-init-pbdm8/init.txt\n>f+++++++++ pod-logs/openstack/manila-db-init-pbdm8/manila-db-init-0.txt\ncd+++++++++ pod-logs/openstack/manila-db-sync-rm9mz/\n>f+++++++++ pod-logs/openstack/manila-db-sync-rm9mz/init.txt\n>f+++++++++ pod-logs/openstack/manila-db-sync-rm9mz/manila-db-sync.txt\ncd+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/\n>f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/init.txt\n>f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-public.txt\n>f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-public.txt\ncd+++++++++ pod-logs/openstack/manila-ks-service-g7svt/\n>f+++++++++ pod-logs/openstack/manila-ks-service-g7svt/init.txt\n>f+++++++++ pod-logs/openstack/manila-ks-service-g7svt/share-ks-service-registration.txt\n>f+++++++++ 
pod-logs/openstack/manila-ks-service-g7svt/sharev2-ks-service-registration.txt\ncd+++++++++ pod-logs/openstack/manila-ks-user-pr9mg/\n>f+++++++++ pod-logs/openstack/manila-ks-user-pr9mg/init.txt\n>f+++++++++ pod-logs/openstack/manila-ks-user-pr9mg/manila-ks-user.txt\ncd+++++++++ pod-logs/openstack/manila-rabbit-init-74vjs/\n>f+++++++++ pod-logs/openstack/manila-rabbit-init-74vjs/init.txt\n>f+++++++++ pod-logs/openstack/manila-rabbit-init-74vjs/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/manila-scheduler-5b584c8656-mmnnd/\n>f+++++++++ pod-logs/openstack/manila-scheduler-5b584c8656-mmnnd/init.txt\n>f+++++++++ pod-logs/openstack/manila-scheduler-5b584c8656-mmnnd/manila-scheduler.txt\ncd+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/\n>f+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/init.txt\n>f+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/manila-share-init.txt\n>f+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/manila-share.txt\ncd+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/\n>f+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/init.txt\n>f+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/memcached-exporter.txt\n>f+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/memcached.txt\ncd+++++++++ pod-logs/openstack/neutron-db-init-l7c9v/\n>f+++++++++ pod-logs/openstack/neutron-db-init-l7c9v/init.txt\n>f+++++++++ pod-logs/openstack/neutron-db-init-l7c9v/neutron-db-init-0.txt\ncd+++++++++ pod-logs/openstack/neutron-db-sync-brwb5/\n>f+++++++++ pod-logs/openstack/neutron-db-sync-brwb5/init.txt\n>f+++++++++ pod-logs/openstack/neutron-db-sync-brwb5/neutron-db-sync.txt\ncd+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/\n>f+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/init.txt\n>f+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-admin.txt\n>f+++++++++ 
pod-logs/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-public.txt\ncd+++++++++ pod-logs/openstack/neutron-ks-service-sq4tp/\n>f+++++++++ pod-logs/openstack/neutron-ks-service-sq4tp/init.txt\n>f+++++++++ pod-logs/openstack/neutron-ks-service-sq4tp/network-ks-service-registration.txt\ncd+++++++++ pod-logs/openstack/neutron-ks-user-kcfc4/\n>f+++++++++ pod-logs/openstack/neutron-ks-user-kcfc4/init.txt\n>f+++++++++ pod-logs/openstack/neutron-ks-user-kcfc4/neutron-ks-user.txt\ncd+++++++++ pod-logs/openstack/neutron-netns-cleanup-cron-default-8frwf/\n>f+++++++++ pod-logs/openstack/neutron-netns-cleanup-cron-default-8frwf/init.txt\n>f+++++++++ pod-logs/openstack/neutron-netns-cleanup-cron-default-8frwf/neutron-netns-cleanup-cron.txt\ncd+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/\n>f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/init.txt\n>f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-metadata-agent-init.txt\n>f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-ovn-metadata-agent.txt\n>f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/ovn-neutron-init.txt\ncd+++++++++ pod-logs/openstack/neutron-rabbit-init-rdnbf/\n>f+++++++++ pod-logs/openstack/neutron-rabbit-init-rdnbf/init.txt\n>f+++++++++ pod-logs/openstack/neutron-rabbit-init-rdnbf/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/\n>f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/init.txt\n>f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/neutron-policy-server.txt\n>f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/neutron-server.txt\n>f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/ovn-neutron-init.txt\ncd+++++++++ pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/\n>f+++++++++ 
pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/init.txt\n>f+++++++++ pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api-metadata-init.txt\n>f+++++++++ pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api.txt\ncd+++++++++ pod-logs/openstack/nova-api-osapi-99c7b7cd8-2lnzr/\n>f+++++++++ pod-logs/openstack/nova-api-osapi-99c7b7cd8-2lnzr/init.txt\n>f+++++++++ pod-logs/openstack/nova-api-osapi-99c7b7cd8-2lnzr/nova-osapi.txt\ncd+++++++++ pod-logs/openstack/nova-bootstrap-trzqq/\n>f+++++++++ pod-logs/openstack/nova-bootstrap-trzqq/bootstrap.txt\n>f+++++++++ pod-logs/openstack/nova-bootstrap-trzqq/init.txt\ncd+++++++++ pod-logs/openstack/nova-cell-setup-29540040-rtzd7/\n>f+++++++++ pod-logs/openstack/nova-cell-setup-29540040-rtzd7/init.txt\n>f+++++++++ pod-logs/openstack/nova-cell-setup-29540040-rtzd7/nova-cell-setup.txt\ncd+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/\n>f+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/init.txt\n>f+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/nova-cell-setup-init.txt\n>f+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/nova-cell-setup.txt\ncd+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/ceph-admin-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/ceph-keyring-placement.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/ceph-perms.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/init.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-init.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-ssh-init.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-ssh.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-vnc-init.txt\n>f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute.txt\ncd+++++++++ pod-logs/openstack/nova-conductor-5474cb4b8d-bxzhq/\n>f+++++++++ 
pod-logs/openstack/nova-conductor-5474cb4b8d-bxzhq/init.txt\n>f+++++++++ pod-logs/openstack/nova-conductor-5474cb4b8d-bxzhq/nova-conductor.txt\ncd+++++++++ pod-logs/openstack/nova-db-init-b4sqh/\n>f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/init.txt\n>f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/nova-db-init-0.txt\n>f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/nova-db-init-1.txt\n>f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/nova-db-init-2.txt\ncd+++++++++ pod-logs/openstack/nova-db-sync-2rbjc/\n>f+++++++++ pod-logs/openstack/nova-db-sync-2rbjc/init.txt\n>f+++++++++ pod-logs/openstack/nova-db-sync-2rbjc/nova-db-sync.txt\ncd+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/\n>f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-public.txt\n>f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/init.txt\ncd+++++++++ pod-logs/openstack/nova-ks-service-fmj77/\n>f+++++++++ pod-logs/openstack/nova-ks-service-fmj77/compute-ks-service-registration.txt\n>f+++++++++ pod-logs/openstack/nova-ks-service-fmj77/init.txt\ncd+++++++++ pod-logs/openstack/nova-ks-user-t8xgz/\n>f+++++++++ pod-logs/openstack/nova-ks-user-t8xgz/init.txt\n>f+++++++++ pod-logs/openstack/nova-ks-user-t8xgz/nova-ks-user.txt\ncd+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/\n>f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/init.txt\n>f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init-assets.txt\n>f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init.txt\n>f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy.txt\ncd+++++++++ pod-logs/openstack/nova-rabbit-init-szpvx/\n>f+++++++++ pod-logs/openstack/nova-rabbit-init-szpvx/init.txt\n>f+++++++++ 
pod-logs/openstack/nova-rabbit-init-szpvx/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/nova-scheduler-78775555d4-hb2j9/\n>f+++++++++ pod-logs/openstack/nova-scheduler-78775555d4-hb2j9/init.txt\n>f+++++++++ pod-logs/openstack/nova-scheduler-78775555d4-hb2j9/nova-scheduler.txt\ncd+++++++++ pod-logs/openstack/nova-service-cleaner-29540040-cxdd4/\n>f+++++++++ pod-logs/openstack/nova-service-cleaner-29540040-cxdd4/init.txt\n>f+++++++++ pod-logs/openstack/nova-service-cleaner-29540040-cxdd4/nova-service-cleaner.txt\ncd+++++++++ pod-logs/openstack/octavia-api-75db6578cf-m656r/\n>f+++++++++ pod-logs/openstack/octavia-api-75db6578cf-m656r/init.txt\n>f+++++++++ pod-logs/openstack/octavia-api-75db6578cf-m656r/octavia-api.txt\ncd+++++++++ pod-logs/openstack/octavia-bootstrap-kwfv2/\n>f+++++++++ pod-logs/openstack/octavia-bootstrap-kwfv2/bootstrap.txt\n>f+++++++++ pod-logs/openstack/octavia-bootstrap-kwfv2/init.txt\ncd+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/\n>f+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/init.txt\n>f+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/octavia-db-init-0.txt\n>f+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/octavia-db-init-1.txt\ncd+++++++++ pod-logs/openstack/octavia-db-sync-rjq45/\n>f+++++++++ pod-logs/openstack/octavia-db-sync-rjq45/init.txt\n>f+++++++++ pod-logs/openstack/octavia-db-sync-rjq45/octavia-db-sync.txt\ncd+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/\n>f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/init.txt\n>f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/octavia-health-manager-get-port.txt\n>f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/octavia-health-manager-nic-init.txt\n>f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/octavia-health-manager.txt\ncd+++++++++ pod-logs/openstack/octavia-housekeeping-87b98c47b-vqwct/\n>f+++++++++ pod-logs/openstack/octavia-housekeeping-87b98c47b-vqwct/init.txt\n>f+++++++++ 
pod-logs/openstack/octavia-housekeeping-87b98c47b-vqwct/octavia-housekeeping.txt\ncd+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/\n>f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/init.txt\n>f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-public.txt\ncd+++++++++ pod-logs/openstack/octavia-ks-service-rkdp9/\n>f+++++++++ pod-logs/openstack/octavia-ks-service-rkdp9/init.txt\n>f+++++++++ pod-logs/openstack/octavia-ks-service-rkdp9/load-balancer-ks-service-registration.txt\ncd+++++++++ pod-logs/openstack/octavia-ks-user-tjl52/\n>f+++++++++ pod-logs/openstack/octavia-ks-user-tjl52/init.txt\n>f+++++++++ pod-logs/openstack/octavia-ks-user-tjl52/octavia-ks-user.txt\ncd+++++++++ pod-logs/openstack/octavia-rabbit-init-vdqxf/\n>f+++++++++ pod-logs/openstack/octavia-rabbit-init-vdqxf/init.txt\n>f+++++++++ pod-logs/openstack/octavia-rabbit-init-vdqxf/rabbit-init.txt\ncd+++++++++ pod-logs/openstack/octavia-worker-774cddbcdc-qxl6k/\n>f+++++++++ pod-logs/openstack/octavia-worker-774cddbcdc-qxl6k/init.txt\n>f+++++++++ pod-logs/openstack/octavia-worker-774cddbcdc-qxl6k/octavia-worker.txt\ncd+++++++++ pod-logs/openstack/openstack-database-exporter-7c944bc9f-w2bdb/\n>f+++++++++ pod-logs/openstack/openstack-database-exporter-7c944bc9f-w2bdb/openstack-database-exporter.txt\ncd+++++++++ pod-logs/openstack/openstack-exporter-74676fb4b4-jrkwh/\n>f+++++++++ pod-logs/openstack/openstack-exporter-74676fb4b4-jrkwh/build-config.txt\n>f+++++++++ pod-logs/openstack/openstack-exporter-74676fb4b4-jrkwh/openstack-exporter.txt\ncd+++++++++ pod-logs/openstack/openvswitch-gj98d/\n>f+++++++++ pod-logs/openstack/openvswitch-gj98d/init.txt\n>f+++++++++ pod-logs/openstack/openvswitch-gj98d/openvswitch-db-perms.txt\n>f+++++++++ 
pod-logs/openstack/openvswitch-gj98d/openvswitch-db.txt\n>f+++++++++ pod-logs/openstack/openvswitch-gj98d/openvswitch-vswitchd-modules.txt\n>f+++++++++ pod-logs/openstack/openvswitch-gj98d/openvswitch-vswitchd.txt\ncd+++++++++ pod-logs/openstack/ovn-controller-6mbd4/\n>f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/controller-init.txt\n>f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/controller.txt\n>f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/get-gw-enabled.txt\n>f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/init.txt\n>f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/log-parser.txt\n>f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/vector.txt\ncd+++++++++ pod-logs/openstack/ovn-northd-6c6687ddd6-7grhs/\n>f+++++++++ pod-logs/openstack/ovn-northd-6c6687ddd6-7grhs/init.txt\n>f+++++++++ pod-logs/openstack/ovn-northd-6c6687ddd6-7grhs/northd.txt\ncd+++++++++ pod-logs/openstack/ovn-ovsdb-nb-0/\n>f+++++++++ pod-logs/openstack/ovn-ovsdb-nb-0/init.txt\n>f+++++++++ pod-logs/openstack/ovn-ovsdb-nb-0/ovsdb.txt\ncd+++++++++ pod-logs/openstack/ovn-ovsdb-sb-0/\n>f+++++++++ pod-logs/openstack/ovn-ovsdb-sb-0/init.txt\n>f+++++++++ pod-logs/openstack/ovn-ovsdb-sb-0/ovsdb.txt\ncd+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/\n>f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/haproxy-init.txt\n>f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/haproxy.txt\n>f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/pxc-init.txt\n>f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/pxc-monit.txt\ncd+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/\n>f+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/exporter.txt\n>f+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/pxc-init.txt\n>f+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/pxc.txt\ncd+++++++++ pod-logs/openstack/placement-api-75695696c6-brsxj/\n>f+++++++++ pod-logs/openstack/placement-api-75695696c6-brsxj/init.txt\n>f+++++++++ 
pod-logs/openstack/placement-api-75695696c6-brsxj/placement-api.txt\ncd+++++++++ pod-logs/openstack/placement-db-init-89t92/\n>f+++++++++ pod-logs/openstack/placement-db-init-89t92/init.txt\n>f+++++++++ pod-logs/openstack/placement-db-init-89t92/placement-db-init-0.txt\ncd+++++++++ pod-logs/openstack/placement-db-sync-nvqjv/\n>f+++++++++ pod-logs/openstack/placement-db-sync-nvqjv/init.txt\n>f+++++++++ pod-logs/openstack/placement-db-sync-nvqjv/placement-db-sync.txt\ncd+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/\n>f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/init.txt\n>f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-admin.txt\n>f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-internal.txt\n>f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-public.txt\ncd+++++++++ pod-logs/openstack/placement-ks-service-qdjdz/\n>f+++++++++ pod-logs/openstack/placement-ks-service-qdjdz/init.txt\n>f+++++++++ pod-logs/openstack/placement-ks-service-qdjdz/placement-ks-service-registration.txt\ncd+++++++++ pod-logs/openstack/placement-ks-user-blkn9/\n>f+++++++++ pod-logs/openstack/placement-ks-user-blkn9/init.txt\n>f+++++++++ pod-logs/openstack/placement-ks-user-blkn9/placement-ks-user.txt\ncd+++++++++ pod-logs/openstack/pxc-operator-7cff949c8b-7zp4j/\n>f+++++++++ pod-logs/openstack/pxc-operator-7cff949c8b-7zp4j/percona-xtradb-cluster-operator.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-barbican-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-barbican-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-barbican-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-cinder-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-cinder-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-cinder-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/\n>f+++++++++ 
pod-logs/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/rabbitmq-cluster-operator.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-glance-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-glance-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-glance-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-heat-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-heat-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-heat-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-keystone-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-keystone-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-keystone-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-magnum-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-magnum-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-magnum-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-manila-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-manila-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-manila-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/\n>f+++++++++ pod-logs/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/rabbitmq-cluster-operator.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-neutron-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-neutron-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-neutron-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-nova-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-nova-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-nova-server-0/setup-container.txt\ncd+++++++++ pod-logs/openstack/rabbitmq-octavia-server-0/\n>f+++++++++ pod-logs/openstack/rabbitmq-octavia-server-0/rabbitmq.txt\n>f+++++++++ pod-logs/openstack/rabbitmq-octavia-server-0/setup-container.txt\ncd+++++++++ 
pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/\n>f+++++++++ pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/ceph-crash.txt\n>f+++++++++ pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/chown-container-data-dir.txt\n>f+++++++++ pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/make-container-crash-dir.txt\ncd+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/\n>f+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/chown-container-data-dir.txt\n>f+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/log-collector.txt\n>f+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/rgw.txt\ncd+++++++++ pod-logs/openstack/staffeln-api-6669c8779f-qgp4c/\n>f+++++++++ pod-logs/openstack/staffeln-api-6669c8779f-qgp4c/init.txt\n>f+++++++++ pod-logs/openstack/staffeln-api-6669c8779f-qgp4c/staffeln-api.txt\ncd+++++++++ pod-logs/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/\n>f+++++++++ pod-logs/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/staffeln-conductor.txt\ncd+++++++++ pod-logs/openstack/staffeln-db-init-p4pq4/\n>f+++++++++ pod-logs/openstack/staffeln-db-init-p4pq4/init.txt\n>f+++++++++ pod-logs/openstack/staffeln-db-init-p4pq4/staffeln-db-init-0.txt\ncd+++++++++ pod-logs/openstack/staffeln-db-sync-khzx8/\n>f+++++++++ pod-logs/openstack/staffeln-db-sync-khzx8/init.txt\n>f+++++++++ pod-logs/openstack/staffeln-db-sync-khzx8/staffeln-db-sync.txt\ncd+++++++++ pod-logs/openstack/tempest-ks-user-kwbf6/\n>f+++++++++ pod-logs/openstack/tempest-ks-user-kwbf6/init.txt\n>f+++++++++ pod-logs/openstack/tempest-ks-user-kwbf6/tempest-ks-user.txt\ncd+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/\n>f+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/init.txt\n>f+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/tempest-run-tests-init.txt\n>f+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/tempest-run-tests.txt\ncd+++++++++ 
pod-logs/openstack/valkey-node-0/\n>f+++++++++ pod-logs/openstack/valkey-node-0/metrics.txt\n>f+++++++++ pod-logs/openstack/valkey-node-0/sentinel.txt\n>f+++++++++ pod-logs/openstack/valkey-node-0/valkey.txt\ncd+++++++++ pod-logs/orc-system/\ncd+++++++++ pod-logs/orc-system/orc-controller-manager-6cb597b5d4-glhcz/\n>f+++++++++ pod-logs/orc-system/orc-controller-manager-6cb597b5d4-glhcz/manager.txt\ncd+++++++++ pod-logs/rook-ceph/\ncd+++++++++ pod-logs/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/\n>f+++++++++ pod-logs/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/rook-ceph-operator.txt\ncd+++++++++ pod-logs/secretgen-controller/\ncd+++++++++ pod-logs/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/\n>f+++++++++ pod-logs/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/secretgen-controller.txt\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ pod-logs/",
                                "cd+++++++++ pod-logs/auth-system/",
                                "cd+++++++++ pod-logs/auth-system/keycloak-0/",
                                ">f+++++++++ pod-logs/auth-system/keycloak-0/keycloak.txt",
                                "cd+++++++++ pod-logs/capi-kubeadm-bootstrap-system/",
                                "cd+++++++++ pod-logs/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/",
                                ">f+++++++++ pod-logs/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/manager.txt",
                                "cd+++++++++ pod-logs/capi-kubeadm-control-plane-system/",
                                "cd+++++++++ pod-logs/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/",
                                ">f+++++++++ pod-logs/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/manager.txt",
                                "cd+++++++++ pod-logs/capi-system/",
                                "cd+++++++++ pod-logs/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/",
                                ">f+++++++++ pod-logs/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/manager.txt",
                                "cd+++++++++ pod-logs/capo-system/",
                                "cd+++++++++ pod-logs/capo-system/capo-controller-manager-6975759b4b-tkxrs/",
                                ">f+++++++++ pod-logs/capo-system/capo-controller-manager-6975759b4b-tkxrs/manager.txt",
                                "cd+++++++++ pod-logs/cert-manager/",
                                "cd+++++++++ pod-logs/cert-manager/cert-manager-75c4c745bc-45s4r/",
                                ">f+++++++++ pod-logs/cert-manager/cert-manager-75c4c745bc-45s4r/cert-manager-controller.txt",
                                "cd+++++++++ pod-logs/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/",
                                ">f+++++++++ pod-logs/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/cert-manager-cainjector.txt",
                                "cd+++++++++ pod-logs/cert-manager/cert-manager-webhook-548949fc64-vkrlt/",
                                ">f+++++++++ pod-logs/cert-manager/cert-manager-webhook-548949fc64-vkrlt/cert-manager-webhook.txt",
                                "cd+++++++++ pod-logs/failed-pods/",
                                "cd+++++++++ pod-logs/failed-pods/auth-system/",
                                "cd+++++++++ pod-logs/failed-pods/auth-system/keycloak-0/",
                                ">f+++++++++ pod-logs/failed-pods/auth-system/keycloak-0/keycloak.txt",
                                "cd+++++++++ pod-logs/failed-pods/capi-kubeadm-bootstrap-system/",
                                "cd+++++++++ pod-logs/failed-pods/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/",
                                ">f+++++++++ pod-logs/failed-pods/capi-kubeadm-bootstrap-system/capi-kubeadm-bootstrap-controller-manager-6558cd8d7f-cvt8k/manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/capi-kubeadm-control-plane-system/",
                                "cd+++++++++ pod-logs/failed-pods/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/",
                                ">f+++++++++ pod-logs/failed-pods/capi-kubeadm-control-plane-system/capi-kubeadm-control-plane-controller-manager-bdfc6fdd8-6kmd4/manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/capi-system/",
                                "cd+++++++++ pod-logs/failed-pods/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/",
                                ">f+++++++++ pod-logs/failed-pods/capi-system/capi-controller-manager-bc4cf8c95-w8p6b/manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/capo-system/",
                                "cd+++++++++ pod-logs/failed-pods/capo-system/capo-controller-manager-6975759b4b-tkxrs/",
                                ">f+++++++++ pod-logs/failed-pods/capo-system/capo-controller-manager-6975759b4b-tkxrs/manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/cert-manager/",
                                "cd+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-75c4c745bc-45s4r/",
                                ">f+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-75c4c745bc-45s4r/cert-manager-controller.txt",
                                "cd+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/",
                                ">f+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-cainjector-64b59ddb75-tl5x7/cert-manager-cainjector.txt",
                                "cd+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-webhook-548949fc64-vkrlt/",
                                ">f+++++++++ pod-logs/failed-pods/cert-manager/cert-manager-webhook-548949fc64-vkrlt/cert-manager-webhook.txt",
                                "cd+++++++++ pod-logs/failed-pods/ingress-nginx/",
                                "cd+++++++++ pod-logs/failed-pods/ingress-nginx/ingress-nginx-controller-j4bqv/",
                                ">f+++++++++ pod-logs/failed-pods/ingress-nginx/ingress-nginx-controller-j4bqv/controller.txt",
                                "cd+++++++++ pod-logs/failed-pods/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/",
                                ">f+++++++++ pod-logs/failed-pods/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/ingress-nginx-default-backend.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/cilium-operator-869df985b8-kszk2/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-operator-869df985b8-kszk2/cilium-operator.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/apply-sysctl-overwrites.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/cilium-agent.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/clean-cilium-state.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/config.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/install-cni-binaries.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/mount-bpf-fs.txt",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/cilium-vdz4f/mount-cgroup.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-6f2mm/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-6f2mm/coredns.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-j6fp4/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/coredns-67659f764b-j6fp4/coredns.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/etcd-instance/etcd.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-apiserver-instance/kube-apiserver.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-controller-manager-instance/kube-controller-manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-sp2vs/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-proxy-sp2vs/kube-proxy.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-scheduler-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-scheduler-instance/kube-scheduler.txt",
                                "cd+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/",
                                ">f+++++++++ pod-logs/failed-pods/kube-system/kube-vip-instance/kube-vip.txt",
                                "cd+++++++++ pod-logs/failed-pods/local-path-storage/",
                                "cd+++++++++ pod-logs/failed-pods/local-path-storage/local-path-provisioner-679c578f5-7h8w5/",
                                ">f+++++++++ pod-logs/failed-pods/local-path-storage/local-path-provisioner-679c578f5-7h8w5/local-path-provisioner.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/alertmanager.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/config-reloader.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/init-config-reloader.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/oauth2-proxy.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/goldpinger-7jzp8/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/goldpinger-7jzp8/goldpinger-daemon.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-dashboard.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-datasources.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/kube-state-metrics.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/kube-prometheus-stack.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/node-exporter.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/pod-tls-sidecar.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/loki-0/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/loki-0/loki.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/loki-chunks-cache-0/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/loki-chunks-cache-0/exporter.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/loki-chunks-cache-0/memcached.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/loki-gateway-cf54cb88c-zv654/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/loki-gateway-cf54cb88c-zv654/nginx.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/loki-results-cache-0/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/loki-results-cache-0/exporter.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/loki-results-cache-0/memcached.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/gc.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/master.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-worker-p8lmk/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/node-feature-discovery-worker-p8lmk/worker.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/config-reloader.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/init-config-reloader.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/oauth2-proxy.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/pod-tls-sidecar.txt",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/prometheus-kube-prometheus-stack-prometheus-0/prometheus.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/prometheus-pushgateway-7b8659c68b-28dht/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/prometheus-pushgateway-7b8659c68b-28dht/pushgateway.txt",
                                "cd+++++++++ pod-logs/failed-pods/monitoring/vector-qzjms/",
                                ">f+++++++++ pod-logs/failed-pods/monitoring/vector-qzjms/vector.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/",
                                "cd+++++++++ pod-logs/failed-pods/openstack/barbican-api-775987496d-z6jqv/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-api-775987496d-z6jqv/barbican-api.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-api-775987496d-z6jqv/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/barbican-db-init-nm8k6/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-db-init-nm8k6/barbican-db-init-0.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-db-init-nm8k6/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/barbican-db-sync-452x5/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-db-sync-452x5/barbican-db-sync.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-db-sync-452x5/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/barbican-ks-service-8pm7j/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-service-8pm7j/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-service-8pm7j/key-manager-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/barbican-ks-user-fszfr/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-user-fszfr/barbican-ks-user.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-ks-user-fszfr/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/barbican-rabbit-init-j5qmd/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-rabbit-init-j5qmd/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/barbican-rabbit-init-j5qmd/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/cinder-api.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-api-86d7694f66-j97gj/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-backup-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-keyring-placement-rbd1.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/cinder-backup.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-dcfd7dfb7-sdwkc/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/cinder-backup-storage-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-backup-storage-init-zmnkh/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-bootstrap-wng86/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-bootstrap-wng86/bootstrap.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-bootstrap-wng86/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-create-internal-tenant-6vgll/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-create-internal-tenant-6vgll/create-internal-tenant.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-create-internal-tenant-6vgll/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-db-init-mzm5b/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-db-init-mzm5b/cinder-db-init-0.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-db-init-mzm5b/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-db-sync-mz6ls/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-db-sync-mz6ls/cinder-db-sync.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-db-sync-mz6ls/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-ks-service-dlcxz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-service-dlcxz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-service-dlcxz/volumev3-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-ks-user-5bd5g/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-user-5bd5g/cinder-ks-user.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-ks-user-5bd5g/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-rabbit-init-l4fpm/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-rabbit-init-l4fpm/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-rabbit-init-l4fpm/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/cinder-scheduler.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-scheduler-586f444995-p7grf/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/cinder-storage-init-rbd1.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-storage-init-vt6br/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/ceph-keyring-placement-rbd1.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/cinder-volume.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/init-cinder-conf.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-66dc847979-qgp4l/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/cinder-volume-usage-audit-29540045-jbmvh/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-usage-audit-29540045-jbmvh/cinder-volume-usage-audit.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/cinder-volume-usage-audit-29540045-jbmvh/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/glance-api.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/glance-perms.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-api-65d579bfc8-6x76l/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-db-init-wbpff/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-db-init-wbpff/glance-db-init-0.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-db-init-wbpff/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-db-sync-gk84f/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-db-sync-gk84f/glance-db-sync.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-db-sync-gk84f/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-endpoints-dq2cc/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-ks-service-5h6bw/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-service-5h6bw/image-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-service-5h6bw/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-ks-user-lcfxr/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-user-lcfxr/glance-ks-user.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-ks-user-lcfxr/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-metadefs-load-476tp/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-metadefs-load-476tp/glance-metadefs-load.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-metadefs-load-476tp/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-rabbit-init-c6rjt/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-rabbit-init-c6rjt/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-rabbit-init-c6rjt/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/glance-storage-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/glance-storage-init-hdcpc/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-api-6d65f9477-kmbkl/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-api-6d65f9477-kmbkl/heat-api.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-api-6d65f9477-kmbkl/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-bootstrap-9dwg2/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-bootstrap-9dwg2/bootstrap.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-bootstrap-9dwg2/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-cfn-f44db7787-t8f7m/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-cfn-f44db7787-t8f7m/heat-cfn.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-cfn-f44db7787-t8f7m/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-db-init-fk8qw/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-db-init-fk8qw/heat-db-init-0.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-db-init-fk8qw/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-db-sync-cxmcb/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-db-sync-cxmcb/heat-db-sync.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-db-sync-cxmcb/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-domain-ks-user-tq2c5/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-domain-ks-user-tq2c5/heat-ks-domain-user.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-domain-ks-user-tq2c5/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-engine-64f8b77bfb-wngkr/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-64f8b77bfb-wngkr/heat-engine.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-64f8b77bfb-wngkr/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540055-p9pq9/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540055-p9pq9/heat-engine-cleaner.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540055-p9pq9/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540060-z4g95/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540060-z4g95/heat-engine-cleaner.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540060-z4g95/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540065-rcjr2/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540065-rcjr2/heat-engine-cleaner.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-engine-cleaner-29540065-rcjr2/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/cloudformation-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-service-8pxqz/orchestration-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/heat-ks-user.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/heat-trustee-ks-user.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-ks-user-tfk98/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-rabbit-init-rbl9n/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-rabbit-init-rbl9n/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-rabbit-init-rbl9n/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/heat-trusts-czrrv/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-trusts-czrrv/heat-trusts.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/heat-trusts-czrrv/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/horizon-8cdd7b888-bvzvx/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/horizon-8cdd7b888-bvzvx/horizon.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/horizon-8cdd7b888-bvzvx/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/horizon-db-init-s5pbw/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/horizon-db-init-s5pbw/horizon-db-init-0.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/horizon-db-init-s5pbw/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/horizon-db-sync-bgr2g/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/horizon-db-sync-bgr2g/horizon-db-sync.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/horizon-db-sync-bgr2g/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keepalived-7jdfz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keepalived-7jdfz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keepalived-7jdfz/keepalived.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keepalived-7jdfz/wait-for-ip.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-api-c4656754c-mqbxm/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-api-c4656754c-mqbxm/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-api-c4656754c-mqbxm/keystone-api.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-bootstrap-mdtrx/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-bootstrap-mdtrx/bootstrap.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-bootstrap-mdtrx/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-credential-setup-6xsvx/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-credential-setup-6xsvx/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-credential-setup-6xsvx/keystone-credential-setup.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-db-init-z5mwz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-db-init-z5mwz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-db-init-z5mwz/keystone-db-init-0.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-db-sync-zsq8z/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-db-sync-zsq8z/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-db-sync-zsq8z/keystone-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-domain-manage-v865d/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-domain-manage-v865d/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-domain-manage-v865d/keystone-domain-manage.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-fernet-setup-5rfqs/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-fernet-setup-5rfqs/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-fernet-setup-5rfqs/keystone-fernet-setup.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/keystone-rabbit-init-m44qz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-rabbit-init-m44qz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/keystone-rabbit-init-m44qz/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/ceph-admin-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/init-dynamic-options.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/libvirt-exporter.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/libvirt.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/libvirt-libvirt-default-6bgrg/tls-sidecar.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-api-8549df7884-9b2zc/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-api-8549df7884-9b2zc/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-api-8549df7884-9b2zc/magnum-api.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-cluster-api-proxy-z2flh/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-cluster-api-proxy-z2flh/magnum-cluster-api-proxy.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/magnum-conductor-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-conductor-0/magnum-conductor.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-db-init-dshrc/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-db-init-dshrc/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-db-init-dshrc/magnum-db-init-0.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-db-sync-8ttpk/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-db-sync-8ttpk/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-db-sync-8ttpk/magnum-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-domain-ks-user-vp8f2/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-domain-ks-user-vp8f2/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-domain-ks-user-vp8f2/magnum-ks-domain-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-endpoints-jvzvf/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-ks-service-vdn67/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-service-vdn67/container-infra-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-service-vdn67/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-ks-user-4wvtj/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-user-4wvtj/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-ks-user-4wvtj/magnum-ks-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-rabbit-init-w7jc7/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-rabbit-init-w7jc7/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-rabbit-init-w7jc7/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/magnum-registry-c45778976-2zz96/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/magnum-registry-c45778976-2zz96/registry.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-api-5cdf958bd9-hmbmb/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-api-5cdf958bd9-hmbmb/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-api-5cdf958bd9-hmbmb/manila-api.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-bootstrap-5wn97/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-bootstrap-5wn97/bootstrap.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-bootstrap-5wn97/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-data-75cbc955bd-27jjw/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-data-75cbc955bd-27jjw/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-data-75cbc955bd-27jjw/manila-data.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-db-init-pbdm8/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-db-init-pbdm8/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-db-init-pbdm8/manila-db-init-0.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-db-sync-rm9mz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-db-sync-rm9mz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-db-sync-rm9mz/manila-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-ks-service-g7svt/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-service-g7svt/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-service-g7svt/share-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-service-g7svt/sharev2-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-ks-user-pr9mg/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-user-pr9mg/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-ks-user-pr9mg/manila-ks-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-rabbit-init-74vjs/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-rabbit-init-74vjs/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-rabbit-init-74vjs/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-scheduler-5b584c8656-mmnnd/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-scheduler-5b584c8656-mmnnd/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-scheduler-5b584c8656-mmnnd/manila-scheduler.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/manila-share-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/manila-share-68879775b-rc6q9/manila-share.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/memcached-exporter.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/memcached-memcached-6479589586-9sxjx/memcached.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-db-init-l7c9v/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-db-init-l7c9v/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-db-init-l7c9v/neutron-db-init-0.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-db-sync-brwb5/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-db-sync-brwb5/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-db-sync-brwb5/neutron-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-ks-service-sq4tp/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-service-sq4tp/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-service-sq4tp/network-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-ks-user-kcfc4/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-user-kcfc4/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ks-user-kcfc4/neutron-ks-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-netns-cleanup-cron-default-8frwf/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-netns-cleanup-cron-default-8frwf/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-netns-cleanup-cron-default-8frwf/neutron-netns-cleanup-cron.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-metadata-agent-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-ovn-metadata-agent.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-ovn-metadata-agent-default-flhb5/ovn-neutron-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-rabbit-init-rdnbf/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-rabbit-init-rdnbf/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-rabbit-init-rdnbf/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/neutron-policy-server.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/neutron-server.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/neutron-server-649c5974f6-5dkvl/ovn-neutron-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api-metadata-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-api-osapi-99c7b7cd8-2lnzr/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-api-osapi-99c7b7cd8-2lnzr/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-api-osapi-99c7b7cd8-2lnzr/nova-osapi.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-bootstrap-trzqq/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-bootstrap-trzqq/bootstrap.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-bootstrap-trzqq/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-29540040-rtzd7/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-29540040-rtzd7/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-29540040-rtzd7/nova-cell-setup.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/nova-cell-setup-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-cell-setup-j97qh/nova-cell-setup.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/ceph-admin-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/ceph-perms.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-ssh-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-ssh.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute-vnc-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-compute-default-2v5pd/nova-compute.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-conductor-5474cb4b8d-bxzhq/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-conductor-5474cb4b8d-bxzhq/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-conductor-5474cb4b8d-bxzhq/nova-conductor.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/nova-db-init-0.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/nova-db-init-1.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-db-init-b4sqh/nova-db-init-2.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-db-sync-2rbjc/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-db-sync-2rbjc/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-db-sync-2rbjc/nova-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-endpoints-zwcm6/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-ks-service-fmj77/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-service-fmj77/compute-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-service-fmj77/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-ks-user-t8xgz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-user-t8xgz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-ks-user-t8xgz/nova-ks-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init-assets.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-rabbit-init-szpvx/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-rabbit-init-szpvx/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-rabbit-init-szpvx/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-scheduler-78775555d4-hb2j9/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-scheduler-78775555d4-hb2j9/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-scheduler-78775555d4-hb2j9/nova-scheduler.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/nova-service-cleaner-29540040-cxdd4/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-service-cleaner-29540040-cxdd4/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/nova-service-cleaner-29540040-cxdd4/nova-service-cleaner.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-api-75db6578cf-m656r/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-api-75db6578cf-m656r/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-api-75db6578cf-m656r/octavia-api.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-bootstrap-kwfv2/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-bootstrap-kwfv2/bootstrap.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-bootstrap-kwfv2/init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/octavia-db-init-0.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-db-init-wnz5h/octavia-db-init-1.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-db-sync-rjq45/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-db-sync-rjq45/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-db-sync-rjq45/octavia-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/octavia-health-manager-get-port.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/octavia-health-manager-nic-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-health-manager-default-twmks/octavia-health-manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-housekeeping-87b98c47b-vqwct/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-housekeeping-87b98c47b-vqwct/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-housekeeping-87b98c47b-vqwct/octavia-housekeeping.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-ks-service-rkdp9/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-service-rkdp9/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-service-rkdp9/load-balancer-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-ks-user-tjl52/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-user-tjl52/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-ks-user-tjl52/octavia-ks-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-rabbit-init-vdqxf/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-rabbit-init-vdqxf/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-rabbit-init-vdqxf/rabbit-init.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/octavia-worker-774cddbcdc-qxl6k/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-worker-774cddbcdc-qxl6k/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/octavia-worker-774cddbcdc-qxl6k/octavia-worker.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/openstack-database-exporter-7c944bc9f-w2bdb/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openstack-database-exporter-7c944bc9f-w2bdb/openstack-database-exporter.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/openstack-exporter-74676fb4b4-jrkwh/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openstack-exporter-74676fb4b4-jrkwh/build-config.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openstack-exporter-74676fb4b4-jrkwh/openstack-exporter.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-db-perms.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-db.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-vswitchd-modules.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/openvswitch-gj98d/openvswitch-vswitchd.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/controller-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/controller.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/get-gw-enabled.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/log-parser.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-controller-6mbd4/vector.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/ovn-northd-6c6687ddd6-7grhs/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-northd-6c6687ddd6-7grhs/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-northd-6c6687ddd6-7grhs/northd.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-nb-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-nb-0/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-nb-0/ovsdb.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-sb-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-sb-0/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/ovn-ovsdb-sb-0/ovsdb.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/haproxy-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/haproxy.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/pxc-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-haproxy-0/pxc-monit.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/exporter.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/pxc-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/percona-xtradb-pxc-0/pxc.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/placement-api-75695696c6-brsxj/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-api-75695696c6-brsxj/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-api-75695696c6-brsxj/placement-api.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/placement-db-init-89t92/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-db-init-89t92/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-db-init-89t92/placement-db-init-0.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/placement-db-sync-nvqjv/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-db-sync-nvqjv/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-db-sync-nvqjv/placement-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/placement-ks-service-qdjdz/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-service-qdjdz/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-service-qdjdz/placement-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/placement-ks-user-blkn9/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-user-blkn9/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/placement-ks-user-blkn9/placement-ks-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/pxc-operator-7cff949c8b-7zp4j/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/pxc-operator-7cff949c8b-7zp4j/percona-xtradb-cluster-operator.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-barbican-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-barbican-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-barbican-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cinder-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cinder-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cinder-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/rabbitmq-cluster-operator.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-glance-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-glance-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-glance-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-heat-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-heat-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-heat-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-keystone-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-keystone-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-keystone-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-magnum-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-magnum-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-magnum-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-manila-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-manila-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-manila-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/rabbitmq-cluster-operator.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-neutron-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-neutron-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-neutron-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-nova-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-nova-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-nova-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rabbitmq-octavia-server-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-octavia-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rabbitmq-octavia-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/ceph-crash.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/chown-container-data-dir.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/make-container-crash-dir.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/chown-container-data-dir.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/log-collector.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/rgw.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/staffeln-api-6669c8779f-qgp4c/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/staffeln-api-6669c8779f-qgp4c/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/staffeln-api-6669c8779f-qgp4c/staffeln-api.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/staffeln-conductor.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/staffeln-db-init-p4pq4/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-init-p4pq4/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-init-p4pq4/staffeln-db-init-0.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/staffeln-db-sync-khzx8/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-sync-khzx8/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/staffeln-db-sync-khzx8/staffeln-db-sync.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/tempest-ks-user-kwbf6/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/tempest-ks-user-kwbf6/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/tempest-ks-user-kwbf6/tempest-ks-user.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/tempest-run-tests-init.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/tempest-run-tests-g5plh/tempest-run-tests.txt",
                                "cd+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/",
                                ">f+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/metrics.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/sentinel.txt",
                                ">f+++++++++ pod-logs/failed-pods/openstack/valkey-node-0/valkey.txt",
                                "cd+++++++++ pod-logs/failed-pods/orc-system/",
                                "cd+++++++++ pod-logs/failed-pods/orc-system/orc-controller-manager-6cb597b5d4-glhcz/",
                                ">f+++++++++ pod-logs/failed-pods/orc-system/orc-controller-manager-6cb597b5d4-glhcz/manager.txt",
                                "cd+++++++++ pod-logs/failed-pods/rook-ceph/",
                                "cd+++++++++ pod-logs/failed-pods/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/",
                                ">f+++++++++ pod-logs/failed-pods/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/rook-ceph-operator.txt",
                                "cd+++++++++ pod-logs/failed-pods/secretgen-controller/",
                                "cd+++++++++ pod-logs/failed-pods/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/",
                                ">f+++++++++ pod-logs/failed-pods/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/secretgen-controller.txt",
                                "cd+++++++++ pod-logs/ingress-nginx/",
                                "cd+++++++++ pod-logs/ingress-nginx/ingress-nginx-controller-j4bqv/",
                                ">f+++++++++ pod-logs/ingress-nginx/ingress-nginx-controller-j4bqv/controller.txt",
                                "cd+++++++++ pod-logs/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/",
                                ">f+++++++++ pod-logs/ingress-nginx/ingress-nginx-defaultbackend-6987ff55cf-gpx4l/ingress-nginx-default-backend.txt",
                                "cd+++++++++ pod-logs/kube-system/",
                                "cd+++++++++ pod-logs/kube-system/cilium-operator-869df985b8-kszk2/",
                                ">f+++++++++ pod-logs/kube-system/cilium-operator-869df985b8-kszk2/cilium-operator.txt",
                                "cd+++++++++ pod-logs/kube-system/cilium-vdz4f/",
                                ">f+++++++++ pod-logs/kube-system/cilium-vdz4f/apply-sysctl-overwrites.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-vdz4f/cilium-agent.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-vdz4f/clean-cilium-state.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-vdz4f/config.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-vdz4f/install-cni-binaries.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-vdz4f/mount-bpf-fs.txt",
                                ">f+++++++++ pod-logs/kube-system/cilium-vdz4f/mount-cgroup.txt",
                                "cd+++++++++ pod-logs/kube-system/coredns-67659f764b-6f2mm/",
                                ">f+++++++++ pod-logs/kube-system/coredns-67659f764b-6f2mm/coredns.txt",
                                "cd+++++++++ pod-logs/kube-system/coredns-67659f764b-j6fp4/",
                                ">f+++++++++ pod-logs/kube-system/coredns-67659f764b-j6fp4/coredns.txt",
                                "cd+++++++++ pod-logs/kube-system/etcd-instance/",
                                ">f+++++++++ pod-logs/kube-system/etcd-instance/etcd.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-apiserver-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-apiserver-instance/kube-apiserver.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-controller-manager-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-controller-manager-instance/kube-controller-manager.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-proxy-sp2vs/",
                                ">f+++++++++ pod-logs/kube-system/kube-proxy-sp2vs/kube-proxy.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-scheduler-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-scheduler-instance/kube-scheduler.txt",
                                "cd+++++++++ pod-logs/kube-system/kube-vip-instance/",
                                ">f+++++++++ pod-logs/kube-system/kube-vip-instance/kube-vip.txt",
                                "cd+++++++++ pod-logs/local-path-storage/",
                                "cd+++++++++ pod-logs/local-path-storage/local-path-provisioner-679c578f5-7h8w5/",
                                ">f+++++++++ pod-logs/local-path-storage/local-path-provisioner-679c578f5-7h8w5/local-path-provisioner.txt",
                                "cd+++++++++ pod-logs/monitoring/",
                                "cd+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/",
                                ">f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/alertmanager.txt",
                                ">f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/config-reloader.txt",
                                ">f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/init-config-reloader.txt",
                                ">f+++++++++ pod-logs/monitoring/alertmanager-kube-prometheus-stack-alertmanager-0/oauth2-proxy.txt",
                                "cd+++++++++ pod-logs/monitoring/goldpinger-7jzp8/",
                                ">f+++++++++ pod-logs/monitoring/goldpinger-7jzp8/goldpinger-daemon.txt",
                                "cd+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/",
                                ">f+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-dashboard.txt",
                                ">f+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana-sc-datasources.txt",
                                ">f+++++++++ pod-logs/monitoring/kube-prometheus-stack-grafana-668bfb9659-ft52b/grafana.txt",
                                "cd+++++++++ pod-logs/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/",
                                ">f+++++++++ pod-logs/monitoring/kube-prometheus-stack-kube-state-metrics-5c97764fc9-w682m/kube-state-metrics.txt",
                                "cd+++++++++ pod-logs/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/",
                                ">f+++++++++ pod-logs/monitoring/kube-prometheus-stack-operator-cd88cf4bf-lzh7g/kube-prometheus-stack.txt",
                                "cd+++++++++ pod-logs/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/",
                                ">f+++++++++ pod-logs/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/node-exporter.txt",
                                ">f+++++++++ pod-logs/monitoring/kube-prometheus-stack-prometheus-node-exporter-59qlm/pod-tls-sidecar.txt",
                                "cd+++++++++ pod-logs/monitoring/loki-0/",
                                ">f+++++++++ pod-logs/monitoring/loki-0/loki.txt",
                                "cd+++++++++ pod-logs/monitoring/loki-chunks-cache-0/",
                                ">f+++++++++ pod-logs/monitoring/loki-chunks-cache-0/exporter.txt",
                                ">f+++++++++ pod-logs/monitoring/loki-chunks-cache-0/memcached.txt",
                                "cd+++++++++ pod-logs/monitoring/loki-gateway-cf54cb88c-zv654/",
                                ">f+++++++++ pod-logs/monitoring/loki-gateway-cf54cb88c-zv654/nginx.txt",
                                "cd+++++++++ pod-logs/monitoring/loki-results-cache-0/",
                                ">f+++++++++ pod-logs/monitoring/loki-results-cache-0/exporter.txt",
                                ">f+++++++++ pod-logs/monitoring/loki-results-cache-0/memcached.txt",
                                "cd+++++++++ pod-logs/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/",
                                ">f+++++++++ pod-logs/monitoring/node-feature-discovery-gc-6675cbb6d9-zv9sn/gc.txt",
                                "cd+++++++++ pod-logs/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/",
                                ">f+++++++++ pod-logs/monitoring/node-feature-discovery-master-8665476dbc-t4z5z/master.txt",
                                "cd+++++++++ pod-logs/monitoring/node-feature-discovery-worker-p8lmk/",
                                ">f+++++++++ pod-logs/monitoring/node-feature-discovery-worker-p8lmk/worker.txt",
                                "cd+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/",
                                ">f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/config-reloader.txt",
                                ">f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/init-config-reloader.txt",
                                ">f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/oauth2-proxy.txt",
                                ">f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/pod-tls-sidecar.txt",
                                ">f+++++++++ pod-logs/monitoring/prometheus-kube-prometheus-stack-prometheus-0/prometheus.txt",
                                "cd+++++++++ pod-logs/monitoring/prometheus-pushgateway-7b8659c68b-28dht/",
                                ">f+++++++++ pod-logs/monitoring/prometheus-pushgateway-7b8659c68b-28dht/pushgateway.txt",
                                "cd+++++++++ pod-logs/monitoring/vector-qzjms/",
                                ">f+++++++++ pod-logs/monitoring/vector-qzjms/vector.txt",
                                "cd+++++++++ pod-logs/openstack/",
                                "cd+++++++++ pod-logs/openstack/barbican-api-775987496d-z6jqv/",
                                ">f+++++++++ pod-logs/openstack/barbican-api-775987496d-z6jqv/barbican-api.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-api-775987496d-z6jqv/init.txt",
                                "cd+++++++++ pod-logs/openstack/barbican-db-init-nm8k6/",
                                ">f+++++++++ pod-logs/openstack/barbican-db-init-nm8k6/barbican-db-init-0.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-db-init-nm8k6/init.txt",
                                "cd+++++++++ pod-logs/openstack/barbican-db-sync-452x5/",
                                ">f+++++++++ pod-logs/openstack/barbican-db-sync-452x5/barbican-db-sync.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-db-sync-452x5/init.txt",
                                "cd+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/init.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-endpoints-w2ffg/key-manager-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/openstack/barbican-ks-service-8pm7j/",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-service-8pm7j/init.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-service-8pm7j/key-manager-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/openstack/barbican-ks-user-fszfr/",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-user-fszfr/barbican-ks-user.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-ks-user-fszfr/init.txt",
                                "cd+++++++++ pod-logs/openstack/barbican-rabbit-init-j5qmd/",
                                ">f+++++++++ pod-logs/openstack/barbican-rabbit-init-j5qmd/init.txt",
                                ">f+++++++++ pod-logs/openstack/barbican-rabbit-init-j5qmd/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/",
                                ">f+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/cinder-api.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-api-86d7694f66-j97gj/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-backup-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/ceph-keyring-placement-rbd1.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/cinder-backup.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-dcfd7dfb7-sdwkc/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/cinder-backup-storage-init.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-backup-storage-init-zmnkh/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-bootstrap-wng86/",
                                ">f+++++++++ pod-logs/openstack/cinder-bootstrap-wng86/bootstrap.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-bootstrap-wng86/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-create-internal-tenant-6vgll/",
                                ">f+++++++++ pod-logs/openstack/cinder-create-internal-tenant-6vgll/create-internal-tenant.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-create-internal-tenant-6vgll/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-db-init-mzm5b/",
                                ">f+++++++++ pod-logs/openstack/cinder-db-init-mzm5b/cinder-db-init-0.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-db-init-mzm5b/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-db-sync-mz6ls/",
                                ">f+++++++++ pod-logs/openstack/cinder-db-sync-mz6ls/cinder-db-sync.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-db-sync-mz6ls/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/init.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-endpoints-xv2tb/volumev3-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-ks-service-dlcxz/",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-service-dlcxz/init.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-service-dlcxz/volumev3-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-ks-user-5bd5g/",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-user-5bd5g/cinder-ks-user.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-ks-user-5bd5g/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-rabbit-init-l4fpm/",
                                ">f+++++++++ pod-logs/openstack/cinder-rabbit-init-l4fpm/init.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-rabbit-init-l4fpm/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/",
                                ">f+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/cinder-scheduler.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-scheduler-586f444995-p7grf/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/",
                                ">f+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/cinder-storage-init-rbd1.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-storage-init-vt6br/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/",
                                ">f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/ceph-coordination-volume-perms.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/ceph-keyring-placement-rbd1.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/cinder-volume.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/init-cinder-conf.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-volume-66dc847979-qgp4l/init.txt",
                                "cd+++++++++ pod-logs/openstack/cinder-volume-usage-audit-29540045-jbmvh/",
                                ">f+++++++++ pod-logs/openstack/cinder-volume-usage-audit-29540045-jbmvh/cinder-volume-usage-audit.txt",
                                ">f+++++++++ pod-logs/openstack/cinder-volume-usage-audit-29540045-jbmvh/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/",
                                ">f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/glance-api.txt",
                                ">f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/glance-perms.txt",
                                ">f+++++++++ pod-logs/openstack/glance-api-65d579bfc8-6x76l/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-db-init-wbpff/",
                                ">f+++++++++ pod-logs/openstack/glance-db-init-wbpff/glance-db-init-0.txt",
                                ">f+++++++++ pod-logs/openstack/glance-db-init-wbpff/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-db-sync-gk84f/",
                                ">f+++++++++ pod-logs/openstack/glance-db-sync-gk84f/glance-db-sync.txt",
                                ">f+++++++++ pod-logs/openstack/glance-db-sync-gk84f/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/",
                                ">f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/image-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/openstack/glance-ks-endpoints-dq2cc/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-ks-service-5h6bw/",
                                ">f+++++++++ pod-logs/openstack/glance-ks-service-5h6bw/image-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/openstack/glance-ks-service-5h6bw/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-ks-user-lcfxr/",
                                ">f+++++++++ pod-logs/openstack/glance-ks-user-lcfxr/glance-ks-user.txt",
                                ">f+++++++++ pod-logs/openstack/glance-ks-user-lcfxr/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-metadefs-load-476tp/",
                                ">f+++++++++ pod-logs/openstack/glance-metadefs-load-476tp/glance-metadefs-load.txt",
                                ">f+++++++++ pod-logs/openstack/glance-metadefs-load-476tp/init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-rabbit-init-c6rjt/",
                                ">f+++++++++ pod-logs/openstack/glance-rabbit-init-c6rjt/init.txt",
                                ">f+++++++++ pod-logs/openstack/glance-rabbit-init-c6rjt/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/",
                                ">f+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/glance-storage-init.txt",
                                ">f+++++++++ pod-logs/openstack/glance-storage-init-hdcpc/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-api-6d65f9477-kmbkl/",
                                ">f+++++++++ pod-logs/openstack/heat-api-6d65f9477-kmbkl/heat-api.txt",
                                ">f+++++++++ pod-logs/openstack/heat-api-6d65f9477-kmbkl/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-bootstrap-9dwg2/",
                                ">f+++++++++ pod-logs/openstack/heat-bootstrap-9dwg2/bootstrap.txt",
                                ">f+++++++++ pod-logs/openstack/heat-bootstrap-9dwg2/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-cfn-f44db7787-t8f7m/",
                                ">f+++++++++ pod-logs/openstack/heat-cfn-f44db7787-t8f7m/heat-cfn.txt",
                                ">f+++++++++ pod-logs/openstack/heat-cfn-f44db7787-t8f7m/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-db-init-fk8qw/",
                                ">f+++++++++ pod-logs/openstack/heat-db-init-fk8qw/heat-db-init-0.txt",
                                ">f+++++++++ pod-logs/openstack/heat-db-init-fk8qw/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-db-sync-cxmcb/",
                                ">f+++++++++ pod-logs/openstack/heat-db-sync-cxmcb/heat-db-sync.txt",
                                ">f+++++++++ pod-logs/openstack/heat-db-sync-cxmcb/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-domain-ks-user-tq2c5/",
                                ">f+++++++++ pod-logs/openstack/heat-domain-ks-user-tq2c5/heat-ks-domain-user.txt",
                                ">f+++++++++ pod-logs/openstack/heat-domain-ks-user-tq2c5/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-engine-64f8b77bfb-wngkr/",
                                ">f+++++++++ pod-logs/openstack/heat-engine-64f8b77bfb-wngkr/heat-engine.txt",
                                ">f+++++++++ pod-logs/openstack/heat-engine-64f8b77bfb-wngkr/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-engine-cleaner-29540055-p9pq9/",
                                ">f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540055-p9pq9/heat-engine-cleaner.txt",
                                ">f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540055-p9pq9/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-engine-cleaner-29540060-z4g95/",
                                ">f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540060-z4g95/heat-engine-cleaner.txt",
                                ">f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540060-z4g95/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-engine-cleaner-29540065-rcjr2/",
                                ">f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540065-rcjr2/heat-engine-cleaner.txt",
                                ">f+++++++++ pod-logs/openstack/heat-engine-cleaner-29540065-rcjr2/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/",
                                ">f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/cloudformation-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/init.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-endpoints-wwzbz/orchestration-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/",
                                ">f+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/cloudformation-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/init.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-service-8pxqz/orchestration-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/openstack/heat-ks-user-tfk98/",
                                ">f+++++++++ pod-logs/openstack/heat-ks-user-tfk98/heat-ks-user.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-user-tfk98/heat-trustee-ks-user.txt",
                                ">f+++++++++ pod-logs/openstack/heat-ks-user-tfk98/init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-rabbit-init-rbl9n/",
                                ">f+++++++++ pod-logs/openstack/heat-rabbit-init-rbl9n/init.txt",
                                ">f+++++++++ pod-logs/openstack/heat-rabbit-init-rbl9n/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/heat-trusts-czrrv/",
                                ">f+++++++++ pod-logs/openstack/heat-trusts-czrrv/heat-trusts.txt",
                                ">f+++++++++ pod-logs/openstack/heat-trusts-czrrv/init.txt",
                                "cd+++++++++ pod-logs/openstack/horizon-8cdd7b888-bvzvx/",
                                ">f+++++++++ pod-logs/openstack/horizon-8cdd7b888-bvzvx/horizon.txt",
                                ">f+++++++++ pod-logs/openstack/horizon-8cdd7b888-bvzvx/init.txt",
                                "cd+++++++++ pod-logs/openstack/horizon-db-init-s5pbw/",
                                ">f+++++++++ pod-logs/openstack/horizon-db-init-s5pbw/horizon-db-init-0.txt",
                                ">f+++++++++ pod-logs/openstack/horizon-db-init-s5pbw/init.txt",
                                "cd+++++++++ pod-logs/openstack/horizon-db-sync-bgr2g/",
                                ">f+++++++++ pod-logs/openstack/horizon-db-sync-bgr2g/horizon-db-sync.txt",
                                ">f+++++++++ pod-logs/openstack/horizon-db-sync-bgr2g/init.txt",
                                "cd+++++++++ pod-logs/openstack/keepalived-7jdfz/",
                                ">f+++++++++ pod-logs/openstack/keepalived-7jdfz/init.txt",
                                ">f+++++++++ pod-logs/openstack/keepalived-7jdfz/keepalived.txt",
                                ">f+++++++++ pod-logs/openstack/keepalived-7jdfz/wait-for-ip.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-api-c4656754c-mqbxm/",
                                ">f+++++++++ pod-logs/openstack/keystone-api-c4656754c-mqbxm/init.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-api-c4656754c-mqbxm/keystone-api.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-bootstrap-mdtrx/",
                                ">f+++++++++ pod-logs/openstack/keystone-bootstrap-mdtrx/bootstrap.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-bootstrap-mdtrx/init.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-credential-setup-6xsvx/",
                                ">f+++++++++ pod-logs/openstack/keystone-credential-setup-6xsvx/init.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-credential-setup-6xsvx/keystone-credential-setup.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-db-init-z5mwz/",
                                ">f+++++++++ pod-logs/openstack/keystone-db-init-z5mwz/init.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-db-init-z5mwz/keystone-db-init-0.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-db-sync-zsq8z/",
                                ">f+++++++++ pod-logs/openstack/keystone-db-sync-zsq8z/init.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-db-sync-zsq8z/keystone-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-domain-manage-v865d/",
                                ">f+++++++++ pod-logs/openstack/keystone-domain-manage-v865d/init.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-domain-manage-v865d/keystone-domain-manage.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-fernet-setup-5rfqs/",
                                ">f+++++++++ pod-logs/openstack/keystone-fernet-setup-5rfqs/init.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-fernet-setup-5rfqs/keystone-fernet-setup.txt",
                                "cd+++++++++ pod-logs/openstack/keystone-rabbit-init-m44qz/",
                                ">f+++++++++ pod-logs/openstack/keystone-rabbit-init-m44qz/init.txt",
                                ">f+++++++++ pod-logs/openstack/keystone-rabbit-init-m44qz/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/",
                                ">f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/ceph-admin-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/init-dynamic-options.txt",
                                ">f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/init.txt",
                                ">f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/libvirt-exporter.txt",
                                ">f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/libvirt.txt",
                                ">f+++++++++ pod-logs/openstack/libvirt-libvirt-default-6bgrg/tls-sidecar.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-api-8549df7884-9b2zc/",
                                ">f+++++++++ pod-logs/openstack/magnum-api-8549df7884-9b2zc/init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-api-8549df7884-9b2zc/magnum-api.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-cluster-api-proxy-z2flh/",
                                ">f+++++++++ pod-logs/openstack/magnum-cluster-api-proxy-z2flh/magnum-cluster-api-proxy.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-conductor-0/",
                                ">f+++++++++ pod-logs/openstack/magnum-conductor-0/init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-conductor-0/magnum-conductor-init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-conductor-0/magnum-conductor.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-db-init-dshrc/",
                                ">f+++++++++ pod-logs/openstack/magnum-db-init-dshrc/init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-db-init-dshrc/magnum-db-init-0.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-db-sync-8ttpk/",
                                ">f+++++++++ pod-logs/openstack/magnum-db-sync-8ttpk/init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-db-sync-8ttpk/magnum-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-domain-ks-user-vp8f2/",
                                ">f+++++++++ pod-logs/openstack/magnum-domain-ks-user-vp8f2/init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-domain-ks-user-vp8f2/magnum-ks-domain-user.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/container-infra-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-endpoints-jvzvf/init.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-ks-service-vdn67/",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-service-vdn67/container-infra-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-service-vdn67/init.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-ks-user-4wvtj/",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-user-4wvtj/init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-ks-user-4wvtj/magnum-ks-user.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-rabbit-init-w7jc7/",
                                ">f+++++++++ pod-logs/openstack/magnum-rabbit-init-w7jc7/init.txt",
                                ">f+++++++++ pod-logs/openstack/magnum-rabbit-init-w7jc7/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/magnum-registry-c45778976-2zz96/",
                                ">f+++++++++ pod-logs/openstack/magnum-registry-c45778976-2zz96/registry.txt",
                                "cd+++++++++ pod-logs/openstack/manila-api-5cdf958bd9-hmbmb/",
                                ">f+++++++++ pod-logs/openstack/manila-api-5cdf958bd9-hmbmb/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-api-5cdf958bd9-hmbmb/manila-api.txt",
                                "cd+++++++++ pod-logs/openstack/manila-bootstrap-5wn97/",
                                ">f+++++++++ pod-logs/openstack/manila-bootstrap-5wn97/bootstrap.txt",
                                ">f+++++++++ pod-logs/openstack/manila-bootstrap-5wn97/init.txt",
                                "cd+++++++++ pod-logs/openstack/manila-data-75cbc955bd-27jjw/",
                                ">f+++++++++ pod-logs/openstack/manila-data-75cbc955bd-27jjw/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-data-75cbc955bd-27jjw/manila-data.txt",
                                "cd+++++++++ pod-logs/openstack/manila-db-init-pbdm8/",
                                ">f+++++++++ pod-logs/openstack/manila-db-init-pbdm8/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-db-init-pbdm8/manila-db-init-0.txt",
                                "cd+++++++++ pod-logs/openstack/manila-db-sync-rm9mz/",
                                ">f+++++++++ pod-logs/openstack/manila-db-sync-rm9mz/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-db-sync-rm9mz/manila-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/",
                                ">f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/share-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-endpoints-d8nr9/sharev2-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/openstack/manila-ks-service-g7svt/",
                                ">f+++++++++ pod-logs/openstack/manila-ks-service-g7svt/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-service-g7svt/share-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-service-g7svt/sharev2-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/openstack/manila-ks-user-pr9mg/",
                                ">f+++++++++ pod-logs/openstack/manila-ks-user-pr9mg/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-ks-user-pr9mg/manila-ks-user.txt",
                                "cd+++++++++ pod-logs/openstack/manila-rabbit-init-74vjs/",
                                ">f+++++++++ pod-logs/openstack/manila-rabbit-init-74vjs/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-rabbit-init-74vjs/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/manila-scheduler-5b584c8656-mmnnd/",
                                ">f+++++++++ pod-logs/openstack/manila-scheduler-5b584c8656-mmnnd/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-scheduler-5b584c8656-mmnnd/manila-scheduler.txt",
                                "cd+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/",
                                ">f+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/manila-share-init.txt",
                                ">f+++++++++ pod-logs/openstack/manila-share-68879775b-rc6q9/manila-share.txt",
                                "cd+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/",
                                ">f+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/init.txt",
                                ">f+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/memcached-exporter.txt",
                                ">f+++++++++ pod-logs/openstack/memcached-memcached-6479589586-9sxjx/memcached.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-db-init-l7c9v/",
                                ">f+++++++++ pod-logs/openstack/neutron-db-init-l7c9v/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-db-init-l7c9v/neutron-db-init-0.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-db-sync-brwb5/",
                                ">f+++++++++ pod-logs/openstack/neutron-db-sync-brwb5/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-db-sync-brwb5/neutron-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-endpoints-dstkg/network-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-ks-service-sq4tp/",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-service-sq4tp/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-service-sq4tp/network-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-ks-user-kcfc4/",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-user-kcfc4/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ks-user-kcfc4/neutron-ks-user.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-netns-cleanup-cron-default-8frwf/",
                                ">f+++++++++ pod-logs/openstack/neutron-netns-cleanup-cron-default-8frwf/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-netns-cleanup-cron-default-8frwf/neutron-netns-cleanup-cron.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/",
                                ">f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-metadata-agent-init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/neutron-ovn-metadata-agent.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-ovn-metadata-agent-default-flhb5/ovn-neutron-init.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-rabbit-init-rdnbf/",
                                ">f+++++++++ pod-logs/openstack/neutron-rabbit-init-rdnbf/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-rabbit-init-rdnbf/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/",
                                ">f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/init.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/neutron-policy-server.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/neutron-server.txt",
                                ">f+++++++++ pod-logs/openstack/neutron-server-649c5974f6-5dkvl/ovn-neutron-init.txt",
                                "cd+++++++++ pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/",
                                ">f+++++++++ pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api-metadata-init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-api-metadata-546d94ddd7-btnrc/nova-api.txt",
                                "cd+++++++++ pod-logs/openstack/nova-api-osapi-99c7b7cd8-2lnzr/",
                                ">f+++++++++ pod-logs/openstack/nova-api-osapi-99c7b7cd8-2lnzr/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-api-osapi-99c7b7cd8-2lnzr/nova-osapi.txt",
                                "cd+++++++++ pod-logs/openstack/nova-bootstrap-trzqq/",
                                ">f+++++++++ pod-logs/openstack/nova-bootstrap-trzqq/bootstrap.txt",
                                ">f+++++++++ pod-logs/openstack/nova-bootstrap-trzqq/init.txt",
                                "cd+++++++++ pod-logs/openstack/nova-cell-setup-29540040-rtzd7/",
                                ">f+++++++++ pod-logs/openstack/nova-cell-setup-29540040-rtzd7/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-cell-setup-29540040-rtzd7/nova-cell-setup.txt",
                                "cd+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/",
                                ">f+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/nova-cell-setup-init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-cell-setup-j97qh/nova-cell-setup.txt",
                                "cd+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/ceph-admin-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/ceph-keyring-placement.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/ceph-perms.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-ssh-init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-ssh.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute-vnc-init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-compute-default-2v5pd/nova-compute.txt",
                                "cd+++++++++ pod-logs/openstack/nova-conductor-5474cb4b8d-bxzhq/",
                                ">f+++++++++ pod-logs/openstack/nova-conductor-5474cb4b8d-bxzhq/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-conductor-5474cb4b8d-bxzhq/nova-conductor.txt",
                                "cd+++++++++ pod-logs/openstack/nova-db-init-b4sqh/",
                                ">f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/nova-db-init-0.txt",
                                ">f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/nova-db-init-1.txt",
                                ">f+++++++++ pod-logs/openstack/nova-db-init-b4sqh/nova-db-init-2.txt",
                                "cd+++++++++ pod-logs/openstack/nova-db-sync-2rbjc/",
                                ">f+++++++++ pod-logs/openstack/nova-db-sync-2rbjc/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-db-sync-2rbjc/nova-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/",
                                ">f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/compute-ks-endpoints-public.txt",
                                ">f+++++++++ pod-logs/openstack/nova-ks-endpoints-zwcm6/init.txt",
                                "cd+++++++++ pod-logs/openstack/nova-ks-service-fmj77/",
                                ">f+++++++++ pod-logs/openstack/nova-ks-service-fmj77/compute-ks-service-registration.txt",
                                ">f+++++++++ pod-logs/openstack/nova-ks-service-fmj77/init.txt",
                                "cd+++++++++ pod-logs/openstack/nova-ks-user-t8xgz/",
                                ">f+++++++++ pod-logs/openstack/nova-ks-user-t8xgz/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-ks-user-t8xgz/nova-ks-user.txt",
                                "cd+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/",
                                ">f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init-assets.txt",
                                ">f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy-init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-novncproxy-85dd5b5965-z6hmj/nova-novncproxy.txt",
                                "cd+++++++++ pod-logs/openstack/nova-rabbit-init-szpvx/",
                                ">f+++++++++ pod-logs/openstack/nova-rabbit-init-szpvx/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-rabbit-init-szpvx/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/nova-scheduler-78775555d4-hb2j9/",
                                ">f+++++++++ pod-logs/openstack/nova-scheduler-78775555d4-hb2j9/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-scheduler-78775555d4-hb2j9/nova-scheduler.txt",
                                "cd+++++++++ pod-logs/openstack/nova-service-cleaner-29540040-cxdd4/",
                                ">f+++++++++ pod-logs/openstack/nova-service-cleaner-29540040-cxdd4/init.txt",
                                ">f+++++++++ pod-logs/openstack/nova-service-cleaner-29540040-cxdd4/nova-service-cleaner.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-api-75db6578cf-m656r/",
                                ">f+++++++++ pod-logs/openstack/octavia-api-75db6578cf-m656r/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-api-75db6578cf-m656r/octavia-api.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-bootstrap-kwfv2/",
                                ">f+++++++++ pod-logs/openstack/octavia-bootstrap-kwfv2/bootstrap.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-bootstrap-kwfv2/init.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/",
                                ">f+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/octavia-db-init-0.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-db-init-wnz5h/octavia-db-init-1.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-db-sync-rjq45/",
                                ">f+++++++++ pod-logs/openstack/octavia-db-sync-rjq45/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-db-sync-rjq45/octavia-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/",
                                ">f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/octavia-health-manager-get-port.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/octavia-health-manager-nic-init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-health-manager-default-twmks/octavia-health-manager.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-housekeeping-87b98c47b-vqwct/",
                                ">f+++++++++ pod-logs/openstack/octavia-housekeeping-87b98c47b-vqwct/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-housekeeping-87b98c47b-vqwct/octavia-housekeeping.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-endpoints-jdlzw/load-balancer-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-ks-service-rkdp9/",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-service-rkdp9/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-service-rkdp9/load-balancer-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-ks-user-tjl52/",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-user-tjl52/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-ks-user-tjl52/octavia-ks-user.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-rabbit-init-vdqxf/",
                                ">f+++++++++ pod-logs/openstack/octavia-rabbit-init-vdqxf/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-rabbit-init-vdqxf/rabbit-init.txt",
                                "cd+++++++++ pod-logs/openstack/octavia-worker-774cddbcdc-qxl6k/",
                                ">f+++++++++ pod-logs/openstack/octavia-worker-774cddbcdc-qxl6k/init.txt",
                                ">f+++++++++ pod-logs/openstack/octavia-worker-774cddbcdc-qxl6k/octavia-worker.txt",
                                "cd+++++++++ pod-logs/openstack/openstack-database-exporter-7c944bc9f-w2bdb/",
                                ">f+++++++++ pod-logs/openstack/openstack-database-exporter-7c944bc9f-w2bdb/openstack-database-exporter.txt",
                                "cd+++++++++ pod-logs/openstack/openstack-exporter-74676fb4b4-jrkwh/",
                                ">f+++++++++ pod-logs/openstack/openstack-exporter-74676fb4b4-jrkwh/build-config.txt",
                                ">f+++++++++ pod-logs/openstack/openstack-exporter-74676fb4b4-jrkwh/openstack-exporter.txt",
                                "cd+++++++++ pod-logs/openstack/openvswitch-gj98d/",
                                ">f+++++++++ pod-logs/openstack/openvswitch-gj98d/init.txt",
                                ">f+++++++++ pod-logs/openstack/openvswitch-gj98d/openvswitch-db-perms.txt",
                                ">f+++++++++ pod-logs/openstack/openvswitch-gj98d/openvswitch-db.txt",
                                ">f+++++++++ pod-logs/openstack/openvswitch-gj98d/openvswitch-vswitchd-modules.txt",
                                ">f+++++++++ pod-logs/openstack/openvswitch-gj98d/openvswitch-vswitchd.txt",
                                "cd+++++++++ pod-logs/openstack/ovn-controller-6mbd4/",
                                ">f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/controller-init.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/controller.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/get-gw-enabled.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/init.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/log-parser.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-controller-6mbd4/vector.txt",
                                "cd+++++++++ pod-logs/openstack/ovn-northd-6c6687ddd6-7grhs/",
                                ">f+++++++++ pod-logs/openstack/ovn-northd-6c6687ddd6-7grhs/init.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-northd-6c6687ddd6-7grhs/northd.txt",
                                "cd+++++++++ pod-logs/openstack/ovn-ovsdb-nb-0/",
                                ">f+++++++++ pod-logs/openstack/ovn-ovsdb-nb-0/init.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-ovsdb-nb-0/ovsdb.txt",
                                "cd+++++++++ pod-logs/openstack/ovn-ovsdb-sb-0/",
                                ">f+++++++++ pod-logs/openstack/ovn-ovsdb-sb-0/init.txt",
                                ">f+++++++++ pod-logs/openstack/ovn-ovsdb-sb-0/ovsdb.txt",
                                "cd+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/",
                                ">f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/haproxy-init.txt",
                                ">f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/haproxy.txt",
                                ">f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/pxc-init.txt",
                                ">f+++++++++ pod-logs/openstack/percona-xtradb-haproxy-0/pxc-monit.txt",
                                "cd+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/",
                                ">f+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/exporter.txt",
                                ">f+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/pxc-init.txt",
                                ">f+++++++++ pod-logs/openstack/percona-xtradb-pxc-0/pxc.txt",
                                "cd+++++++++ pod-logs/openstack/placement-api-75695696c6-brsxj/",
                                ">f+++++++++ pod-logs/openstack/placement-api-75695696c6-brsxj/init.txt",
                                ">f+++++++++ pod-logs/openstack/placement-api-75695696c6-brsxj/placement-api.txt",
                                "cd+++++++++ pod-logs/openstack/placement-db-init-89t92/",
                                ">f+++++++++ pod-logs/openstack/placement-db-init-89t92/init.txt",
                                ">f+++++++++ pod-logs/openstack/placement-db-init-89t92/placement-db-init-0.txt",
                                "cd+++++++++ pod-logs/openstack/placement-db-sync-nvqjv/",
                                ">f+++++++++ pod-logs/openstack/placement-db-sync-nvqjv/init.txt",
                                ">f+++++++++ pod-logs/openstack/placement-db-sync-nvqjv/placement-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/",
                                ">f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/init.txt",
                                ">f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-admin.txt",
                                ">f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-internal.txt",
                                ">f+++++++++ pod-logs/openstack/placement-ks-endpoints-jmfl7/placement-ks-endpoints-public.txt",
                                "cd+++++++++ pod-logs/openstack/placement-ks-service-qdjdz/",
                                ">f+++++++++ pod-logs/openstack/placement-ks-service-qdjdz/init.txt",
                                ">f+++++++++ pod-logs/openstack/placement-ks-service-qdjdz/placement-ks-service-registration.txt",
                                "cd+++++++++ pod-logs/openstack/placement-ks-user-blkn9/",
                                ">f+++++++++ pod-logs/openstack/placement-ks-user-blkn9/init.txt",
                                ">f+++++++++ pod-logs/openstack/placement-ks-user-blkn9/placement-ks-user.txt",
                                "cd+++++++++ pod-logs/openstack/pxc-operator-7cff949c8b-7zp4j/",
                                ">f+++++++++ pod-logs/openstack/pxc-operator-7cff949c8b-7zp4j/percona-xtradb-cluster-operator.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-barbican-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-barbican-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-barbican-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-cinder-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-cinder-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-cinder-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-cluster-operator-5448d56d95-vk9km/rabbitmq-cluster-operator.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-glance-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-glance-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-glance-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-heat-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-heat-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-heat-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-keystone-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-keystone-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-keystone-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-magnum-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-magnum-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-magnum-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-manila-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-manila-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-manila-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-messaging-topology-operator-7f8596f788-84l9x/rabbitmq-cluster-operator.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-neutron-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-neutron-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-neutron-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-nova-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-nova-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-nova-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rabbitmq-octavia-server-0/",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-octavia-server-0/rabbitmq.txt",
                                ">f+++++++++ pod-logs/openstack/rabbitmq-octavia-server-0/setup-container.txt",
                                "cd+++++++++ pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/",
                                ">f+++++++++ pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/ceph-crash.txt",
                                ">f+++++++++ pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/chown-container-data-dir.txt",
                                ">f+++++++++ pod-logs/openstack/rook-ceph-crashcollector-instance-754c646bfd-htxl9/make-container-crash-dir.txt",
                                "cd+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/",
                                ">f+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/chown-container-data-dir.txt",
                                ">f+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/log-collector.txt",
                                ">f+++++++++ pod-logs/openstack/rook-ceph-rgw-ceph-a-699b8bdb59-kkwvw/rgw.txt",
                                "cd+++++++++ pod-logs/openstack/staffeln-api-6669c8779f-qgp4c/",
                                ">f+++++++++ pod-logs/openstack/staffeln-api-6669c8779f-qgp4c/init.txt",
                                ">f+++++++++ pod-logs/openstack/staffeln-api-6669c8779f-qgp4c/staffeln-api.txt",
                                "cd+++++++++ pod-logs/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/",
                                ">f+++++++++ pod-logs/openstack/staffeln-conductor-7b5d99bcd4-ws4sl/staffeln-conductor.txt",
                                "cd+++++++++ pod-logs/openstack/staffeln-db-init-p4pq4/",
                                ">f+++++++++ pod-logs/openstack/staffeln-db-init-p4pq4/init.txt",
                                ">f+++++++++ pod-logs/openstack/staffeln-db-init-p4pq4/staffeln-db-init-0.txt",
                                "cd+++++++++ pod-logs/openstack/staffeln-db-sync-khzx8/",
                                ">f+++++++++ pod-logs/openstack/staffeln-db-sync-khzx8/init.txt",
                                ">f+++++++++ pod-logs/openstack/staffeln-db-sync-khzx8/staffeln-db-sync.txt",
                                "cd+++++++++ pod-logs/openstack/tempest-ks-user-kwbf6/",
                                ">f+++++++++ pod-logs/openstack/tempest-ks-user-kwbf6/init.txt",
                                ">f+++++++++ pod-logs/openstack/tempest-ks-user-kwbf6/tempest-ks-user.txt",
                                "cd+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/",
                                ">f+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/init.txt",
                                ">f+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/tempest-run-tests-init.txt",
                                ">f+++++++++ pod-logs/openstack/tempest-run-tests-g5plh/tempest-run-tests.txt",
                                "cd+++++++++ pod-logs/openstack/valkey-node-0/",
                                ">f+++++++++ pod-logs/openstack/valkey-node-0/metrics.txt",
                                ">f+++++++++ pod-logs/openstack/valkey-node-0/sentinel.txt",
                                ">f+++++++++ pod-logs/openstack/valkey-node-0/valkey.txt",
                                "cd+++++++++ pod-logs/orc-system/",
                                "cd+++++++++ pod-logs/orc-system/orc-controller-manager-6cb597b5d4-glhcz/",
                                ">f+++++++++ pod-logs/orc-system/orc-controller-manager-6cb597b5d4-glhcz/manager.txt",
                                "cd+++++++++ pod-logs/rook-ceph/",
                                "cd+++++++++ pod-logs/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/",
                                ">f+++++++++ pod-logs/rook-ceph/rook-ceph-operator-7b66cfb94c-tj94j/rook-ceph-operator.txt",
                                "cd+++++++++ pod-logs/secretgen-controller/",
                                "cd+++++++++ pod-logs/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/",
                                ">f+++++++++ pod-logs/secretgen-controller/secretgen-controller-5cf976ccc7-szs5h/secretgen-controller.txt"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000001f",
                        "name": "gather-pod-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-pod-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:11.672887Z",
                            "start": "2026-03-01T22:31:10.983851Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000024",
                        "name": "Downloads pod logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/prometheus",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/prometheus",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/prometheus",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/prometheus",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:11.888047Z",
                            "start": "2026-03-01T22:31:11.685976Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000028",
                        "name": "creating directory for helm release descriptions"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nNAMESPACES=$(kubectl get namespaces -o json | jq -r '.items[].metadata.name')\nfor NS in $NAMESPACES; do\n  SERVICES=$(kubectl get svc -n $NS -o json | jq -r '.items[] | select(.spec.ports[].name==\"metrics\") | .metadata.name')\n  for SVC in $SERVICES; do\n    PORT=$(kubectl get svc $SVC -n $NS -o json | jq -r '.spec.ports[] | select(.name==\"metrics\") | .port')\n    echo \"Scraping $SVC.$NS:$PORT/metrics:\"\n    curl \"$SVC.$NS:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$NS-$SVC.txt || true\n  done\ndone",
                            "delta": "0:00:01.668478",
                            "end": "2026-03-01 22:31:13.778429",
                            "failed": true,
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nNAMESPACES=$(kubectl get namespaces -o json | jq -r '.items[].metadata.name')\nfor NS in $NAMESPACES; do\n  SERVICES=$(kubectl get svc -n $NS -o json | jq -r '.items[] | select(.spec.ports[].name==\"metrics\") | .metadata.name')\n  for SVC in $SERVICES; do\n    PORT=$(kubectl get svc $SVC -n $NS -o json | jq -r '.spec.ports[] | select(.name==\"metrics\") | .port')\n    echo \"Scraping $SVC.$NS:$PORT/metrics:\"\n    curl \"$SVC.$NS:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$NS-$SVC.txt || true\n  done\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000029-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "non-zero return code",
                            "rc": 5,
                            "start": "2026-03-01 22:31:12.109951",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "Scraping ingress-nginx-controller-metrics.ingress-nginx:10254/metrics:\n  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current\n                                 Dload  Upload   Total   Spent    Left  Speed\n\r  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0curl: (6) Could not resolve host: ingress-nginx-controller-metrics.ingress-nginx\nScraping kube-dns.kube-system:9153/metrics:\n  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current\n                                 Dload  Upload   Total   Spent    Left  Speed\n\r  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0curl: (6) Could not resolve host: kube-dns.kube-system\njq: error (at <stdin>:953): Cannot iterate over null (null)",
                            "stdout_lines": [
                                "Scraping ingress-nginx-controller-metrics.ingress-nginx:10254/metrics:",
                                "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current",
                                "                                 Dload  Upload   Total   Spent    Left  Speed",
                                "",
                                "  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0curl: (6) Could not resolve host: ingress-nginx-controller-metrics.ingress-nginx",
                                "Scraping kube-dns.kube-system:9153/metrics:",
                                "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current",
                                "                                 Dload  Upload   Total   Spent    Left  Speed",
                                "",
                                "  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0curl: (6) Could not resolve host: kube-dns.kube-system",
                                "jq: error (at <stdin>:953): Cannot iterate over null (null)"
                            ],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000029-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:13.937712Z",
                            "start": "2026-03-01T22:31:11.918119Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000029",
                        "name": "Get metrics from exporter services in all namespaces"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nmgr_endpoints=$(kubectl get endpoints -n ceph -l component=manager -o json | jq -r '.items[].subsets[].addresses[].ip')\necho \"ceph-mgr endpoints: $mgr_endpoints\"\nfor endpoint in $mgr_endpoints; do\n  echo \"checking ceph-mgr at $endpoint\"\n  metrics_curl=\"curl $endpoint:9283/metrics\"\n  op=$(eval \"$metrics_curl\")\n  if [[ -n $op ]]; then\n    curl $endpoint:9283/metrics >> \"/tmp/logs\"/prometheus/ceph-ceph-mgr.txt\n    break\n  else\n    echo \"$endpoint is a standby ceph-mgr. Trying next endpoint\"\n  fi\ndone",
                            "delta": "0:00:00.100168",
                            "end": "2026-03-01 22:31:14.295017",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nmgr_endpoints=$(kubectl get endpoints -n ceph -l component=manager -o json | jq -r '.items[].subsets[].addresses[].ip')\necho \"ceph-mgr endpoints: $mgr_endpoints\"\nfor endpoint in $mgr_endpoints; do\n  echo \"checking ceph-mgr at $endpoint\"\n  metrics_curl=\"curl $endpoint:9283/metrics\"\n  op=$(eval \"$metrics_curl\")\n  if [[ -n $op ]]; then\n    curl $endpoint:9283/metrics >> \"/tmp/logs\"/prometheus/ceph-ceph-mgr.txt\n    break\n  else\n    echo \"$endpoint is a standby ceph-mgr. Trying next endpoint\"\n  fi\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000002a-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 22:31:14.194849",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "ceph-mgr endpoints: ",
                            "stdout_lines": [
                                "ceph-mgr endpoints: "
                            ],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000002a-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:14.514349Z",
                            "start": "2026-03-01T22:31:13.998765Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000002a",
                        "name": "Get ceph metrics from ceph-mgr"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -e\nNAMESPACE=\"osh-infra\"\nAPP_LABEL=\"fluentd\"\nPODS=$(kubectl get pods -n $NAMESPACE -l application=$APP_LABEL -o json | jq -r '.items[].metadata.name')\nfor POD in $PODS; do\n  IP=$(kubectl get pod -n $NAMESPACE $POD -o json | jq -r '.status.podIP')\n  PORT=$(kubectl get pod -n $NAMESPACE $POD -o json |  jq -r '.spec.containers[0].ports[] | select(.name==\"metrics\") | .containerPort')\n  echo \"Scraping $POD at $IP:$PORT/metrics\"\n  curl \"$IP:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$POD.txt || true\ndone",
                            "delta": "0:00:00.111183",
                            "end": "2026-03-01 22:31:14.848892",
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -e\nNAMESPACE=\"osh-infra\"\nAPP_LABEL=\"fluentd\"\nPODS=$(kubectl get pods -n $NAMESPACE -l application=$APP_LABEL -o json | jq -r '.items[].metadata.name')\nfor POD in $PODS; do\n  IP=$(kubectl get pod -n $NAMESPACE $POD -o json | jq -r '.status.podIP')\n  PORT=$(kubectl get pod -n $NAMESPACE $POD -o json |  jq -r '.spec.containers[0].ports[] | select(.name==\"metrics\") | .containerPort')\n  echo \"Scraping $POD at $IP:$PORT/metrics\"\n  curl \"$IP:$PORT/metrics\" >> \"/tmp/logs\"/prometheus/$POD.txt || true\ndone",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000002b-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "",
                            "rc": 0,
                            "start": "2026-03-01 22:31:14.737709",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "",
                            "stdout_lines": [],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-00000000002b-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:15.066432Z",
                            "start": "2026-03-01T22:31:14.544694Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000002b",
                        "name": "Get metrics from fluentd pods"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/tmp/logs/prometheus /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.156:/tmp/logs/prometheus",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ prometheus/\n>f+++++++++ prometheus/ingress-nginx-ingress-nginx-controller-metrics.txt\n>f+++++++++ prometheus/kube-system-kube-dns.txt\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ prometheus/",
                                ">f+++++++++ prometheus/ingress-nginx-ingress-nginx-controller-metrics.txt",
                                ">f+++++++++ prometheus/kube-system-kube-dns.txt"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-000000000026",
                        "name": "gather-prom-metrics",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-prom-metrics"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:15.568907Z",
                            "start": "2026-03-01T22:31:15.073686Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-00000000002c",
                        "name": "Downloads logs to executor"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "diff": {
                                "after": {
                                    "path": "/tmp/logs/selenium",
                                    "state": "directory"
                                },
                                "before": {
                                    "path": "/tmp/logs/selenium",
                                    "state": "absent"
                                }
                            },
                            "gid": 0,
                            "group": "root",
                            "invocation": {
                                "module_args": {
                                    "_diff_peek": null,
                                    "_original_basename": null,
                                    "access_time": null,
                                    "access_time_format": "%Y%m%d%H%M.%S",
                                    "attributes": null,
                                    "follow": true,
                                    "force": false,
                                    "group": null,
                                    "mode": null,
                                    "modification_time": null,
                                    "modification_time_format": "%Y%m%d%H%M.%S",
                                    "owner": null,
                                    "path": "/tmp/logs/selenium",
                                    "recurse": false,
                                    "selevel": null,
                                    "serole": null,
                                    "setype": null,
                                    "seuser": null,
                                    "src": null,
                                    "state": "directory",
                                    "unsafe_writes": false
                                }
                            },
                            "mode": "0755",
                            "owner": "root",
                            "path": "/tmp/logs/selenium",
                            "size": 4096,
                            "state": "directory",
                            "uid": 0
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000002e",
                        "name": "gather-selenium-data",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-selenium-data"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:15.789754Z",
                            "start": "2026-03-01T22:31:15.580522Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000030",
                        "name": "creating directory for helm release descriptions"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "cmd": "set -x\ncp /tmp/artifacts/* /tmp/logs/selenium/.",
                            "delta": "0:00:00.009415",
                            "end": "2026-03-01 22:31:16.020960",
                            "failed": true,
                            "invocation": {
                                "module_args": {
                                    "_raw_params": "set -x\ncp /tmp/artifacts/* /tmp/logs/selenium/.",
                                    "_uses_shell": true,
                                    "argv": null,
                                    "chdir": null,
                                    "creates": null,
                                    "executable": "/bin/bash",
                                    "expand_argument_vars": true,
                                    "removes": null,
                                    "stdin": null,
                                    "stdin_add_newline": true,
                                    "strip_empty_ends": true,
                                    "zuul_ansible_split_streams": false,
                                    "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000031-1-instance",
                                    "zuul_no_log": false,
                                    "zuul_output_max_bytes": 1073741824
                                }
                            },
                            "msg": "non-zero return code",
                            "rc": 1,
                            "start": "2026-03-01 22:31:16.011545",
                            "stderr": "",
                            "stderr_lines": [],
                            "stdout": "+ cp '/tmp/artifacts/*' /tmp/logs/selenium/.\ncp: cannot stat '/tmp/artifacts/*': No such file or directory",
                            "stdout_lines": [
                                "+ cp '/tmp/artifacts/*' /tmp/logs/selenium/.",
                                "cp: cannot stat '/tmp/artifacts/*': No such file or directory"
                            ],
                            "zuul_log_id": "0242ac17-0011-aaa1-afc9-000000000031-1-instance"
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000002e",
                        "name": "gather-selenium-data",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-selenium-data"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:16.335387Z",
                            "start": "2026-03-01T22:31:15.816247Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000031",
                        "name": "Get selenium data"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --rsh='/usr/bin/ssh -S none -o Port=22 -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' --rsync-path='sudo -u root rsync' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/tmp/logs/selenium /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                            "invocation": {
                                "module_args": {
                                    "_local_rsync_password": null,
                                    "_local_rsync_path": "rsync",
                                    "_ssh_args": null,
                                    "_substitute_controller": false,
                                    "archive": true,
                                    "checksum": false,
                                    "compress": true,
                                    "copy_links": false,
                                    "delay_updates": true,
                                    "delete": false,
                                    "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/instance",
                                    "dest_port": 22,
                                    "dirs": false,
                                    "existing_only": false,
                                    "group": null,
                                    "link_dest": null,
                                    "links": null,
                                    "mode": "pull",
                                    "owner": null,
                                    "partial": false,
                                    "perms": null,
                                    "private_key": null,
                                    "recursive": null,
                                    "rsync_opts": [],
                                    "rsync_path": "sudo -u root rsync",
                                    "rsync_timeout": 0,
                                    "set_remote_user": true,
                                    "src": "zuul@199.204.45.156:/tmp/logs/selenium",
                                    "ssh_connection_multiplexing": false,
                                    "times": null,
                                    "use_ssh_args": false,
                                    "verify_host": false
                                }
                            },
                            "msg": "cd+++++++++ selenium/\n",
                            "rc": 0,
                            "stdout_lines": [
                                "cd+++++++++ selenium/"
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-aaa1-afc9-00000000002e",
                        "name": "gather-selenium-data",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/untrusted/project_4/opendev.org/openstack/openstack-helm/roles/gather-selenium-data"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:16.825420Z",
                            "start": "2026-03-01T22:31:16.340694Z"
                        },
                        "id": "0242ac17-0011-aaa1-afc9-000000000032",
                        "name": "Downloads logs to executor"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 23,
            "failures": 0,
            "ignored": 3,
            "ok": 23,
            "rescued": 0,
            "skipped": 0,
            "unreachable": 0
        }
    },
    "trusted": false
},
{
    "branch": "main",
    "index": "1",
    "phase": "post",
    "playbook": "github.com/vexxhost/zuul-config/playbooks/base/post.yaml",
    "plays": [
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T22:31:20.734258Z",
                    "start": "2026-03-01T22:31:17.604692Z"
                },
                "id": "0242ac17-0011-cd98-a8d7-000000000002",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "set_fact",
                            "changed": false,
                            "false_condition": "groups['all'] | length > 1",
                            "skip_reason": "Conditional result was False",
                            "skipped": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-cd98-a8d7-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:17.661509Z",
                            "start": "2026-03-01T22:31:17.615642Z"
                        },
                        "id": "0242ac17-0011-cd98-a8d7-000000000008",
                        "name": "Set log path for multiple nodes"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "set_fact",
                            "ansible_facts": {
                                "log_path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs"
                            },
                            "changed": false
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-cd98-a8d7-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:17.712842Z",
                            "start": "2026-03-01T22:31:17.669573Z"
                        },
                        "id": "0242ac17-0011-cd98-a8d7-000000000009",
                        "name": "Set log path for single node"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "file",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": false,
                                    "diff": {
                                        "after": {
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs"
                                        },
                                        "before": {
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0,
                                    "zj_output_dir": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0,
                                    "zj_output_dir": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts"
                                },
                                {
                                    "ansible_loop_var": "zj_output_dir",
                                    "changed": true,
                                    "diff": {
                                        "after": {
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs",
                                            "state": "directory"
                                        },
                                        "before": {
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs",
                                            "state": "absent"
                                        }
                                    },
                                    "failed": false,
                                    "gid": 0,
                                    "group": "root",
                                    "invocation": {
                                        "module_args": {
                                            "_diff_peek": null,
                                            "_original_basename": null,
                                            "access_time": null,
                                            "access_time_format": "%Y%m%d%H%M.%S",
                                            "attributes": null,
                                            "follow": true,
                                            "force": false,
                                            "group": null,
                                            "mode": 493,
                                            "modification_time": null,
                                            "modification_time_format": "%Y%m%d%H%M.%S",
                                            "owner": null,
                                            "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs",
                                            "recurse": false,
                                            "selevel": null,
                                            "serole": null,
                                            "setype": null,
                                            "seuser": null,
                                            "src": null,
                                            "state": "directory",
                                            "unsafe_writes": false
                                        }
                                    },
                                    "mode": "0755",
                                    "owner": "root",
                                    "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs",
                                    "size": 4096,
                                    "state": "directory",
                                    "uid": 0,
                                    "zj_output_dir": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-cd98-a8d7-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:18.532704Z",
                            "start": "2026-03-01T22:31:17.719819Z"
                        },
                        "id": "0242ac17-0011-cd98-a8d7-00000000000a",
                        "name": "Ensure local output dirs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "synchronize",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_output",
                                    "changed": true,
                                    "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --no-owner --no-group --rsh='/usr/bin/ssh -S none -o Port=22' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/home/zuul/zuul-output/logs/ /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_local_rsync_password": null,
                                            "_local_rsync_path": "rsync",
                                            "_ssh_args": null,
                                            "_substitute_controller": false,
                                            "archive": true,
                                            "checksum": false,
                                            "compress": true,
                                            "copy_links": false,
                                            "delay_updates": true,
                                            "delete": false,
                                            "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/",
                                            "dest_port": 22,
                                            "dirs": false,
                                            "existing_only": false,
                                            "group": false,
                                            "link_dest": null,
                                            "links": null,
                                            "mode": "pull",
                                            "owner": false,
                                            "partial": false,
                                            "perms": null,
                                            "private_key": null,
                                            "recursive": null,
                                            "rsync_opts": [],
                                            "rsync_path": null,
                                            "rsync_timeout": 0,
                                            "set_remote_user": true,
                                            "src": "zuul@199.204.45.156:/home/zuul/zuul-output/logs/",
                                            "ssh_connection_multiplexing": false,
                                            "times": null,
                                            "use_ssh_args": false,
                                            "verify_host": true
                                        }
                                    },
                                    "msg": ".d..t...... ./\n",
                                    "rc": 0,
                                    "stdout_lines": [
                                        ".d..t...... ./"
                                    ],
                                    "zj_output": {
                                        "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs",
                                        "src": "logs"
                                    }
                                },
                                {
                                    "ansible_loop_var": "zj_output",
                                    "changed": true,
                                    "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --no-owner --no-group --rsh='/usr/bin/ssh -S none -o Port=22' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/home/zuul/zuul-output/artifacts/ /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts/",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_local_rsync_password": null,
                                            "_local_rsync_path": "rsync",
                                            "_ssh_args": null,
                                            "_substitute_controller": false,
                                            "archive": true,
                                            "checksum": false,
                                            "compress": true,
                                            "copy_links": false,
                                            "delay_updates": true,
                                            "delete": false,
                                            "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts/",
                                            "dest_port": 22,
                                            "dirs": false,
                                            "existing_only": false,
                                            "group": false,
                                            "link_dest": null,
                                            "links": null,
                                            "mode": "pull",
                                            "owner": false,
                                            "partial": false,
                                            "perms": null,
                                            "private_key": null,
                                            "recursive": null,
                                            "rsync_opts": [],
                                            "rsync_path": null,
                                            "rsync_timeout": 0,
                                            "set_remote_user": true,
                                            "src": "zuul@199.204.45.156:/home/zuul/zuul-output/artifacts/",
                                            "ssh_connection_multiplexing": false,
                                            "times": null,
                                            "use_ssh_args": false,
                                            "verify_host": true
                                        }
                                    },
                                    "msg": ".d..t...... ./\n",
                                    "rc": 0,
                                    "stdout_lines": [
                                        ".d..t...... ./"
                                    ],
                                    "zj_output": {
                                        "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts",
                                        "src": "artifacts"
                                    }
                                },
                                {
                                    "ansible_loop_var": "zj_output",
                                    "changed": true,
                                    "cmd": "/usr/bin/rsync --delay-updates -F --compress --archive --no-owner --no-group --rsh='/usr/bin/ssh -S none -o Port=22' --out-format='<<CHANGED>>%i %n%L' zuul@199.204.45.156:/home/zuul/zuul-output/docs/ /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs/",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_local_rsync_password": null,
                                            "_local_rsync_path": "rsync",
                                            "_ssh_args": null,
                                            "_substitute_controller": false,
                                            "archive": true,
                                            "checksum": false,
                                            "compress": true,
                                            "copy_links": false,
                                            "delay_updates": true,
                                            "delete": false,
                                            "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs/",
                                            "dest_port": 22,
                                            "dirs": false,
                                            "existing_only": false,
                                            "group": false,
                                            "link_dest": null,
                                            "links": null,
                                            "mode": "pull",
                                            "owner": false,
                                            "partial": false,
                                            "perms": null,
                                            "private_key": null,
                                            "recursive": null,
                                            "rsync_opts": [],
                                            "rsync_path": null,
                                            "rsync_timeout": 0,
                                            "set_remote_user": true,
                                            "src": "zuul@199.204.45.156:/home/zuul/zuul-output/docs/",
                                            "ssh_connection_multiplexing": false,
                                            "times": null,
                                            "use_ssh_args": false,
                                            "verify_host": true
                                        }
                                    },
                                    "msg": ".d..t...... ./\n",
                                    "rc": 0,
                                    "stdout_lines": [
                                        ".d..t...... ./"
                                    ],
                                    "zj_output": {
                                        "dest": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs",
                                        "src": "docs"
                                    }
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-cd98-a8d7-000000000006",
                        "name": "fetch-output",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/fetch-output"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:20.061532Z",
                            "start": "2026-03-01T22:31:18.542323Z"
                        },
                        "id": "0242ac17-0011-cd98-a8d7-00000000000c",
                        "name": "Collect logs, artifacts and docs"
                    }
                },
                {
                    "hosts": {
                        "instance": {
                            "action": "shell",
                            "changed": true,
                            "msg": "All items completed",
                            "results": [
                                {
                                    "ansible_loop_var": "zj_item",
                                    "changed": true,
                                    "cmd": "if [ -n \"$(find /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/artifacts\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts/* /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/artifacts\nfi\n",
                                    "delta": "0:00:00.007669",
                                    "end": "2026-03-01 22:31:20.463652",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_raw_params": "if [ -n \"$(find /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/artifacts\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/artifacts/* /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/artifacts\nfi\n",
                                            "_uses_shell": true,
                                            "argv": null,
                                            "chdir": null,
                                            "creates": null,
                                            "executable": null,
                                            "expand_argument_vars": true,
                                            "removes": null,
                                            "stdin": null,
                                            "stdin_add_newline": true,
                                            "strip_empty_ends": true,
                                            "zuul_ansible_split_streams": false,
                                            "zuul_log_id": "in-loop-ignore",
                                            "zuul_no_log": false,
                                            "zuul_output_max_bytes": 1073741824
                                        }
                                    },
                                    "msg": "",
                                    "rc": 0,
                                    "start": "2026-03-01 22:31:20.455983",
                                    "stderr": "",
                                    "stderr_lines": [],
                                    "stdout": "",
                                    "stdout_lines": [],
                                    "zj_item": "artifacts",
                                    "zuul_log_id": "in-loop-ignore"
                                },
                                {
                                    "ansible_loop_var": "zj_item",
                                    "changed": true,
                                    "cmd": "if [ -n \"$(find /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/docs\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs/* /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/docs\nfi\n",
                                    "delta": "0:00:00.007318",
                                    "end": "2026-03-01 22:31:20.690017",
                                    "failed": false,
                                    "invocation": {
                                        "module_args": {
                                            "_raw_params": "if [ -n \"$(find /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs -mindepth 1)\" ] ; then\n  # Only create target directory if it is needed.\n  # Do not fail if it is already there.\n  mkdir -p /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/docs\n  # Leave the original directory behind so that other roles\n  # operating on the interface directories can simply no-op.\n  mv -f /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/docs/* /var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/work/logs/docs\nfi\n",
                                            "_uses_shell": true,
                                            "argv": null,
                                            "chdir": null,
                                            "creates": null,
                                            "executable": null,
                                            "expand_argument_vars": true,
                                            "removes": null,
                                            "stdin": null,
                                            "stdin_add_newline": true,
                                            "strip_empty_ends": true,
                                            "zuul_ansible_split_streams": false,
                                            "zuul_log_id": "in-loop-ignore",
                                            "zuul_no_log": false,
                                            "zuul_output_max_bytes": 1073741824
                                        }
                                    },
                                    "msg": "",
                                    "rc": 0,
                                    "start": "2026-03-01 22:31:20.682699",
                                    "stderr": "",
                                    "stderr_lines": [],
                                    "stdout": "",
                                    "stdout_lines": [],
                                    "zj_item": "docs",
                                    "zuul_log_id": "in-loop-ignore"
                                }
                            ]
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-cd98-a8d7-00000000000f",
                        "name": "merge-output-to-logs",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/merge-output-to-logs"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:20.734258Z",
                            "start": "2026-03-01T22:31:20.075160Z"
                        },
                        "id": "0242ac17-0011-cd98-a8d7-000000000011",
                        "name": "Move artifacts and docs to logs dir"
                    }
                }
            ]
        },
        {
            "play": {
                "duration": {
                    "end": "2026-03-01T22:31:21.171845Z",
                    "start": "2026-03-01T22:31:20.745992Z"
                },
                "id": "0242ac17-0011-cd98-a8d7-000000000013",
                "name": "all"
            },
            "tasks": [
                {
                    "hosts": {
                        "instance": {
                            "action": "authorized_key",
                            "changed": true,
                            "comment": null,
                            "exclusive": false,
                            "follow": false,
                            "invocation": {
                                "module_args": {
                                    "changed": true,
                                    "comment": null,
                                    "exclusive": false,
                                    "follow": false,
                                    "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDPfwECylRYVh2wnK/+KlS5o86JBjkIQyEEYbI2SOozJMFOPh426hBYiUkfbWWE5thTsWfbU3n3LqHWL1qxx1aBSMkVyFVWoO8IlpkIHvNL/EKBIzI0BF7FQ1uNfOXz7CV17FqM34DkbVH0HLbT5rO+94slg28bFZhXcuezQG1XqqlqsVsChsYX/ovkqP7spOQ+S2Ne5CQq3i8xYdzSR4xA024sG2xW0oP0v1Chwfds0ZOecgSjEQalsrbQgWfRLb4SjAPc94AK5fIga5HwIcDKIvXuajxEqagCq7mlnsIbPfXY0NOCxB1FQ/Fr0Qun+dhr5Pa6VwcL0FNQ6COC0LTmlCfCwNP1ELOhyPXzumFXrGqrWPzKrGUMQ+nuPoPgKohkW0rN+YbrG2QeTzUqYfdkwS5dBNZmr10jZKI7WyFAf3dM8GWdDONDxWx4owIyqtBSrvObBZUzXkYqzY9XXBeXPdvbKKDIiFKVsAAnSWIVBrc0uCKNwjbXPZs931C4noc= zuul-build-sshkey",
                                    "key_options": null,
                                    "keyfile": "/home/zuul/.ssh/authorized_keys",
                                    "manage_dir": true,
                                    "path": null,
                                    "state": "absent",
                                    "user": "zuul",
                                    "validate_certs": true
                                }
                            },
                            "key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDPfwECylRYVh2wnK/+KlS5o86JBjkIQyEEYbI2SOozJMFOPh426hBYiUkfbWWE5thTsWfbU3n3LqHWL1qxx1aBSMkVyFVWoO8IlpkIHvNL/EKBIzI0BF7FQ1uNfOXz7CV17FqM34DkbVH0HLbT5rO+94slg28bFZhXcuezQG1XqqlqsVsChsYX/ovkqP7spOQ+S2Ne5CQq3i8xYdzSR4xA024sG2xW0oP0v1Chwfds0ZOecgSjEQalsrbQgWfRLb4SjAPc94AK5fIga5HwIcDKIvXuajxEqagCq7mlnsIbPfXY0NOCxB1FQ/Fr0Qun+dhr5Pa6VwcL0FNQ6COC0LTmlCfCwNP1ELOhyPXzumFXrGqrWPzKrGUMQ+nuPoPgKohkW0rN+YbrG2QeTzUqYfdkwS5dBNZmr10jZKI7WyFAf3dM8GWdDONDxWx4owIyqtBSrvObBZUzXkYqzY9XXBeXPdvbKKDIiFKVsAAnSWIVBrc0uCKNwjbXPZs931C4noc= zuul-build-sshkey",
                            "key_options": null,
                            "keyfile": "/home/zuul/.ssh/authorized_keys",
                            "manage_dir": true,
                            "path": null,
                            "state": "absent",
                            "user": "zuul",
                            "validate_certs": true
                        }
                    },
                    "role": {
                        "id": "0242ac17-0011-cd98-a8d7-000000000016",
                        "name": "remove-build-sshkey",
                        "path": "/var/lib/zuul/builds/5458773516a34c28bdbe6e02248c9f88/trusted/project_1/opendev.org/zuul/zuul-jobs/roles/remove-build-sshkey"
                    },
                    "task": {
                        "duration": {
                            "end": "2026-03-01T22:31:21.171845Z",
                            "start": "2026-03-01T22:31:20.753396Z"
                        },
                        "id": "0242ac17-0011-cd98-a8d7-000000000018",
                        "name": "Remove the build SSH key from all nodes"
                    }
                }
            ]
        }
    ],
    "stats": {
        "instance": {
            "changed": 4,
            "failures": 0,
            "ignored": 0,
            "ok": 5,
            "rescued": 0,
            "skipped": 1,
            "unreachable": 0
        }
    },
    "trusted": true
}
]
