2026-04-01T02:20:50.813 INFO:root:teuthology version: 1.2.4.dev37+ga59626679 2026-04-01T02:20:50.819 DEBUG:teuthology.report:Pushing job info to http://localhost:8080 2026-04-01T02:20:50.840 INFO:teuthology.run:Config: archive_path: /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640 branch: wip-sse-s3-on-v20.2.0 description: rgw/dedup/{beast bluestore-bitmap fixed-3-rgw ignore-pg-availability overrides supported-distros/{rocky_latest} tasks/{0-install test_dedup}} email: null first_in_suite: false flavor: default job_id: '4640' last_in_suite: false machine_type: vps name: supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps no_nested_subset: false openstack: - volumes: count: 4 size: 10 os_type: rocky os_version: '9.7' overrides: admin_socket: branch: wip-sse-s3-on-v20.2.0 ansible.cephlab: branch: main repo: https://github.com/kshtsk/ceph-cm-ansible.git skip_tags: nagios,monitoring-scripts,hostname,pubkeys,zap,sudoers,kerberos,ntp-client,resolvconf,cpan,nfs vars: logical_volumes: lv_1: scratch_dev: true size: 25%VG vg: vg_nvme lv_2: scratch_dev: true size: 25%VG vg: vg_nvme lv_3: scratch_dev: true size: 25%VG vg: vg_nvme lv_4: scratch_dev: true size: 25%VG vg: vg_nvme timezone: UTC volume_groups: vg_nvme: pvs: /dev/vdb,/dev/vdc,/dev/vdd,/dev/vde ceph: conf: client: debug rgw: 20 debug rgw dedup: 20 setgroup: ceph setuser: ceph global: osd_max_pg_log_entries: 10 osd_min_pg_log_entries: 10 mgr: debug mgr: 20 debug ms: 1 mon: debug mon: 20 debug ms: 1 debug paxos: 20 osd: bdev async discard: true bdev enable discard: true bluestore allocator: bitmap bluestore block size: 96636764160 bluestore fsck on mount: true debug bluefs: 1/20 debug bluestore: 1/20 debug ms: 1 debug osd: 20 debug rocksdb: 4/10 mon osd backfillfull_ratio: 0.85 mon osd full ratio: 0.9 mon osd nearfull ratio: 0.8 osd failsafe full ratio: 0.95 osd mclock iops capacity threshold hdd: 49000 osd objectstore: bluestore osd shutdown pgref assert: true flavor: default fs: xfs log-ignorelist: - \(MDS_ALL_DOWN\) - \(MDS_UP_LESS_THAN_MAX\) - \(PG_AVAILABILITY\) - \(PG_DEGRADED\) - \(POOL_APP_NOT_ENABLED\) - not have an application enabled sha1: 0597158282e6d69429e60df2354a6c8eed0e5bce ceph-deploy: bluestore: true conf: client: log file: /var/log/ceph/ceph-$name.$pid.log mon: {} osd: bdev async discard: true bdev enable discard: true bluestore block size: 96636764160 bluestore fsck on mount: true debug bluefs: 1/20 debug bluestore: 1/20 debug rocksdb: 4/10 mon osd backfillfull_ratio: 0.85 mon osd full ratio: 0.9 mon osd nearfull ratio: 0.8 osd failsafe full ratio: 0.95 osd objectstore: bluestore fs: xfs cephadm: cephadm_binary_url: https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm containers: image: harbor.clyso.com/custom-ceph/ceph/ceph:sse-s3-kmip-preview-not-for-production-1 install: ceph: flavor: default sha1: 0597158282e6d69429e60df2354a6c8eed0e5bce extra_system_packages: deb: - python3-jmespath - python3-xmltodict - s3cmd rpm: - bzip2 - perl-Test-Harness - python3-jmespath - python3-xmltodict - s3cmd repos: - name: ceph-source priority: 1 url: https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS - name: ceph-noarch priority: 1 url: https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch - name: ceph priority: 1 url: https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64 rgw: frontend: beast storage classes: FROZEN: null LUKEWARM: null selinux: 
allowlist: - scontext=system_u:system_r:getty_t:s0 thrashosds: bdev_inject_crash: 2 bdev_inject_crash_probability: 0.5 workunit: branch: tt-20.2.0-sse-s3-kmip-preview-not-for-production-1 sha1: 99e8bef8f767b591604d6078b7861a00c2936d53 owner: supriti priority: 1000 repo: https://github.com/ceph/ceph.git roles: - - mon.a - mon.c - mgr.y - osd.0 - osd.1 - osd.2 - osd.3 - client.0 - - mon.b - mgr.x - osd.4 - osd.5 - osd.6 - osd.7 - client.1 - - client.2 seed: 3272 sha1: 0597158282e6d69429e60df2354a6c8eed0e5bce sleep_before_teardown: 0 suite: rgw suite_branch: tt-20.2.0-sse-s3-kmip-preview-not-for-production-1 suite_path: /home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa suite_relpath: qa suite_repo: http://git.local/ceph.git suite_sha1: 99e8bef8f767b591604d6078b7861a00c2936d53 targets: vm03.local: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJZwDUOh3zvib7TVzo4Y24n4OUReeoEY1l5B0ITkmg1Alqtlro/JVK/7fS22qcxfbF2hh6yVUub8V06OJzE5OGQ= vm06.local: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJufWi95YzKp8OS5O6guCB6+nO+jN5Mpb2ZbTmlHjYf0yAxVcP0LQ3WkYeow+7e1jyetVfeP7zc+9ymQ28vOd6w= vm08.local: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBCc0oOROpmKveC/LHgE0rrQ1CSaqp95S2yb7Ecx/0YH8RexBcAcwxPSmfLVB15OIYJQkCjzsm+mFQBHlSZ5xiBc= tasks: - install: null - ceph: null - openssl_keys: null - rgw: - client.0 - client.1 - client.2 - tox: - client.0 - tox: - client.0 - dedup-tests: client.0: rgw_server: client.0 teuthology: fragments_dropped: [] meta: {} postmerge: [] teuthology_branch: uv2 teuthology_repo: https://github.com/kshtsk/teuthology teuthology_sha1: a59626679648f962bca99d20d35578f2998c8f37 timestamp: 2026-03-31_23:51:22 tube: vps user: supriti verbose: false worker_log: /home/teuthos/.teuthology/dispatcher/dispatcher.vps.282426 2026-04-01T02:20:50.840 INFO:teuthology.run:suite_path is set to /home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa; will attempt to use it 2026-04-01T02:20:50.840 INFO:teuthology.run:Found tasks at /home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks 2026-04-01T02:20:50.840 INFO:teuthology.run_tasks:Running task internal.save_config... 2026-04-01T02:20:50.841 INFO:teuthology.task.internal:Saving configuration 2026-04-01T02:20:50.863 INFO:teuthology.run_tasks:Running task internal.check_lock... 2026-04-01T02:20:50.864 INFO:teuthology.task.internal.check_lock:Checking locks... 
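The YAML under teuthology.run:Config: above is the full job description that drives the rest of this run. For orientation, a minimal PyYAML sketch (assuming the config has been copied into a local file; the filename job.yaml is only an example) that pulls out the fields most useful when triaging such a job: targets, role groups, and the task list.

    # Sketch: summarize the teuthology job config shown above.
    # Assumes the YAML was saved to job.yaml (illustrative filename).
    import yaml

    with open("job.yaml") as f:
        job = yaml.safe_load(f)

    print("suite:", job["suite"], "| branch:", job["branch"], "| flavor:", job["flavor"])
    print("targets:", ", ".join(job["targets"]))      # vm03.local, vm06.local, vm08.local
    for i, group in enumerate(job["roles"]):          # one role list per node
        print(f"roles[{i}]:", ", ".join(group))
    for task in job["tasks"]:                         # each task is a single-key mapping
        print("task:", next(iter(task)))

For this job that prints the three vm targets, the two mon/mgr/osd/client role groups plus the standalone client.2 node, and the install / ceph / openssl_keys / rgw / tox / dedup-tests task sequence visible above.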
2026-04-01T02:20:50.872 DEBUG:teuthology.task.internal.check_lock:machine status is {'name': 'vm03.local', 'description': '/archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640', 'up': True, 'machine_type': 'vps', 'is_vm': True, 'vm_host': {'name': 'localhost', 'description': None, 'up': True, 'machine_type': 'libvirt', 'is_vm': False, 'vm_host': None, 'os_type': None, 'os_version': None, 'arch': None, 'locked': True, 'locked_since': None, 'locked_by': None, 'mac_address': None, 'ssh_pub_key': None}, 'os_type': 'rocky', 'os_version': '9.7', 'arch': 'x86_64', 'locked': True, 'locked_since': '2026-04-01 02:19:28.966833', 'locked_by': 'supriti', 'mac_address': '52:55:00:00:00:03', 'ssh_pub_key': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJZwDUOh3zvib7TVzo4Y24n4OUReeoEY1l5B0ITkmg1Alqtlro/JVK/7fS22qcxfbF2hh6yVUub8V06OJzE5OGQ='} 2026-04-01T02:20:50.876 DEBUG:teuthology.task.internal.check_lock:machine status is {'name': 'vm06.local', 'description': '/archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640', 'up': True, 'machine_type': 'vps', 'is_vm': True, 'vm_host': {'name': 'localhost', 'description': None, 'up': True, 'machine_type': 'libvirt', 'is_vm': False, 'vm_host': None, 'os_type': None, 'os_version': None, 'arch': None, 'locked': True, 'locked_since': None, 'locked_by': None, 'mac_address': None, 'ssh_pub_key': None}, 'os_type': 'rocky', 'os_version': '9.7', 'arch': 'x86_64', 'locked': True, 'locked_since': '2026-04-01 02:19:28.967688', 'locked_by': 'supriti', 'mac_address': '52:55:00:00:00:06', 'ssh_pub_key': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJufWi95YzKp8OS5O6guCB6+nO+jN5Mpb2ZbTmlHjYf0yAxVcP0LQ3WkYeow+7e1jyetVfeP7zc+9ymQ28vOd6w='} 2026-04-01T02:20:50.881 DEBUG:teuthology.task.internal.check_lock:machine status is {'name': 'vm08.local', 'description': '/archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640', 'up': True, 'machine_type': 'vps', 'is_vm': True, 'vm_host': {'name': 'localhost', 'description': None, 'up': True, 'machine_type': 'libvirt', 'is_vm': False, 'vm_host': None, 'os_type': None, 'os_version': None, 'arch': None, 'locked': True, 'locked_since': None, 'locked_by': None, 'mac_address': None, 'ssh_pub_key': None}, 'os_type': 'rocky', 'os_version': '9.7', 'arch': 'x86_64', 'locked': True, 'locked_since': '2026-04-01 02:19:28.967307', 'locked_by': 'supriti', 'mac_address': '52:55:00:00:00:08', 'ssh_pub_key': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBCc0oOROpmKveC/LHgE0rrQ1CSaqp95S2yb7Ecx/0YH8RexBcAcwxPSmfLVB15OIYJQkCjzsm+mFQBHlSZ5xiBc='} 2026-04-01T02:20:50.881 INFO:teuthology.run_tasks:Running task internal.add_remotes... 2026-04-01T02:20:50.882 INFO:teuthology.task.internal:roles: ubuntu@vm03.local - ['mon.a', 'mon.c', 'mgr.y', 'osd.0', 'osd.1', 'osd.2', 'osd.3', 'client.0'] 2026-04-01T02:20:50.882 INFO:teuthology.task.internal:roles: ubuntu@vm06.local - ['mon.b', 'mgr.x', 'osd.4', 'osd.5', 'osd.6', 'osd.7', 'client.1'] 2026-04-01T02:20:50.882 INFO:teuthology.task.internal:roles: ubuntu@vm08.local - ['client.2'] 2026-04-01T02:20:50.882 INFO:teuthology.run_tasks:Running task console_log... 
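internal.add_remotes pairs each role group from the config with a locked target in the order both are listed, which is why vm03.local carries the first mon/mgr/osd/client group and vm08.local only client.2. A small sketch of that pairing, reusing the job dict from the previous snippet (the ubuntu user matches the log):

    # Sketch: reproduce the role-to-host assignment logged by internal.add_remotes.
    # `job` as loaded above; pairing by listed order is how this particular job
    # lines up (three targets, three role groups).
    for (host, _pubkey), group in zip(job["targets"].items(), job["roles"]):
        print(f"roles: ubuntu@{host} - {group}")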
2026-04-01T02:20:50.888 DEBUG:teuthology.task.console_log:vm03 does not support IPMI; excluding 2026-04-01T02:20:50.894 DEBUG:teuthology.task.console_log:vm06 does not support IPMI; excluding 2026-04-01T02:20:50.899 DEBUG:teuthology.task.console_log:vm08 does not support IPMI; excluding 2026-04-01T02:20:50.899 DEBUG:teuthology.exit:Installing handler: Handler(exiter=, func=.kill_console_loggers at 0x7fd63de65510>, signals=[15]) 2026-04-01T02:20:50.899 INFO:teuthology.run_tasks:Running task internal.connect... 2026-04-01T02:20:50.900 INFO:teuthology.task.internal:Opening connections... 2026-04-01T02:20:50.900 DEBUG:teuthology.task.internal:connecting to ubuntu@vm03.local 2026-04-01T02:20:50.900 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm03.local', 'username': 'ubuntu', 'timeout': 60} 2026-04-01T02:20:50.964 DEBUG:teuthology.task.internal:connecting to ubuntu@vm06.local 2026-04-01T02:20:50.965 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm06.local', 'username': 'ubuntu', 'timeout': 60} 2026-04-01T02:20:51.026 DEBUG:teuthology.task.internal:connecting to ubuntu@vm08.local 2026-04-01T02:20:51.027 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm08.local', 'username': 'ubuntu', 'timeout': 60} 2026-04-01T02:20:51.086 INFO:teuthology.run_tasks:Running task internal.push_inventory... 2026-04-01T02:20:51.087 DEBUG:teuthology.orchestra.run.vm03:> uname -m 2026-04-01T02:20:51.101 INFO:teuthology.orchestra.run.vm03.stdout:x86_64 2026-04-01T02:20:51.101 DEBUG:teuthology.orchestra.run.vm03:> cat /etc/os-release 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:NAME="Rocky Linux" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:VERSION="9.7 (Blue Onyx)" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:ID="rocky" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:ID_LIKE="rhel centos fedora" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:VERSION_ID="9.7" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:PLATFORM_ID="platform:el9" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:PRETTY_NAME="Rocky Linux 9.7 (Blue Onyx)" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:ANSI_COLOR="0;32" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:LOGO="fedora-logo-icon" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:CPE_NAME="cpe:/o:rocky:rocky:9::baseos" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:HOME_URL="https://rockylinux.org/" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:VENDOR_NAME="RESF" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:VENDOR_URL="https://resf.org/" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:BUG_REPORT_URL="https://bugs.rockylinux.org/" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:SUPPORT_END="2032-05-31" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:ROCKY_SUPPORT_PRODUCT="Rocky-Linux-9" 2026-04-01T02:20:51.155 INFO:teuthology.orchestra.run.vm03.stdout:ROCKY_SUPPORT_PRODUCT_VERSION="9.7" 2026-04-01T02:20:51.156 INFO:teuthology.orchestra.run.vm03.stdout:REDHAT_SUPPORT_PRODUCT="Rocky Linux" 2026-04-01T02:20:51.156 INFO:teuthology.orchestra.run.vm03.stdout:REDHAT_SUPPORT_PRODUCT_VERSION="9.7" 2026-04-01T02:20:51.156 INFO:teuthology.lock.ops:Updating vm03.local on lock server 2026-04-01T02:20:51.160 DEBUG:teuthology.orchestra.run.vm06:> uname -m 2026-04-01T02:20:51.175 
INFO:teuthology.orchestra.run.vm06.stdout:x86_64 2026-04-01T02:20:51.175 DEBUG:teuthology.orchestra.run.vm06:> cat /etc/os-release 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:NAME="Rocky Linux" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:VERSION="9.7 (Blue Onyx)" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:ID="rocky" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:ID_LIKE="rhel centos fedora" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:VERSION_ID="9.7" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:PLATFORM_ID="platform:el9" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:PRETTY_NAME="Rocky Linux 9.7 (Blue Onyx)" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:ANSI_COLOR="0;32" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:LOGO="fedora-logo-icon" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:CPE_NAME="cpe:/o:rocky:rocky:9::baseos" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:HOME_URL="https://rockylinux.org/" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:VENDOR_NAME="RESF" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:VENDOR_URL="https://resf.org/" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:BUG_REPORT_URL="https://bugs.rockylinux.org/" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:SUPPORT_END="2032-05-31" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:ROCKY_SUPPORT_PRODUCT="Rocky-Linux-9" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:ROCKY_SUPPORT_PRODUCT_VERSION="9.7" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:REDHAT_SUPPORT_PRODUCT="Rocky Linux" 2026-04-01T02:20:51.233 INFO:teuthology.orchestra.run.vm06.stdout:REDHAT_SUPPORT_PRODUCT_VERSION="9.7" 2026-04-01T02:20:51.233 INFO:teuthology.lock.ops:Updating vm06.local on lock server 2026-04-01T02:20:51.238 DEBUG:teuthology.orchestra.run.vm08:> uname -m 2026-04-01T02:20:51.252 INFO:teuthology.orchestra.run.vm08.stdout:x86_64 2026-04-01T02:20:51.252 DEBUG:teuthology.orchestra.run.vm08:> cat /etc/os-release 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:NAME="Rocky Linux" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:VERSION="9.7 (Blue Onyx)" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:ID="rocky" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:ID_LIKE="rhel centos fedora" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:VERSION_ID="9.7" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:PLATFORM_ID="platform:el9" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:PRETTY_NAME="Rocky Linux 9.7 (Blue Onyx)" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:ANSI_COLOR="0;32" 2026-04-01T02:20:51.306 INFO:teuthology.orchestra.run.vm08.stdout:LOGO="fedora-logo-icon" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:CPE_NAME="cpe:/o:rocky:rocky:9::baseos" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:HOME_URL="https://rockylinux.org/" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:VENDOR_NAME="RESF" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:VENDOR_URL="https://resf.org/" 2026-04-01T02:20:51.307 
INFO:teuthology.orchestra.run.vm08.stdout:BUG_REPORT_URL="https://bugs.rockylinux.org/" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:SUPPORT_END="2032-05-31" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:ROCKY_SUPPORT_PRODUCT="Rocky-Linux-9" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:ROCKY_SUPPORT_PRODUCT_VERSION="9.7" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:REDHAT_SUPPORT_PRODUCT="Rocky Linux" 2026-04-01T02:20:51.307 INFO:teuthology.orchestra.run.vm08.stdout:REDHAT_SUPPORT_PRODUCT_VERSION="9.7" 2026-04-01T02:20:51.307 INFO:teuthology.lock.ops:Updating vm08.local on lock server 2026-04-01T02:20:51.311 INFO:teuthology.run_tasks:Running task internal.serialize_remote_roles... 2026-04-01T02:20:51.313 INFO:teuthology.run_tasks:Running task internal.check_conflict... 2026-04-01T02:20:51.313 INFO:teuthology.task.internal:Checking for old test directory... 2026-04-01T02:20:51.314 DEBUG:teuthology.orchestra.run.vm03:> test '!' -e /home/ubuntu/cephtest 2026-04-01T02:20:51.315 DEBUG:teuthology.orchestra.run.vm06:> test '!' -e /home/ubuntu/cephtest 2026-04-01T02:20:51.317 DEBUG:teuthology.orchestra.run.vm08:> test '!' -e /home/ubuntu/cephtest 2026-04-01T02:20:51.360 INFO:teuthology.run_tasks:Running task internal.check_ceph_data... 2026-04-01T02:20:51.361 INFO:teuthology.task.internal:Checking for non-empty /var/lib/ceph... 2026-04-01T02:20:51.361 DEBUG:teuthology.orchestra.run.vm03:> test -z $(ls -A /var/lib/ceph) 2026-04-01T02:20:51.370 DEBUG:teuthology.orchestra.run.vm06:> test -z $(ls -A /var/lib/ceph) 2026-04-01T02:20:51.372 DEBUG:teuthology.orchestra.run.vm08:> test -z $(ls -A /var/lib/ceph) 2026-04-01T02:20:51.383 INFO:teuthology.orchestra.run.vm03.stderr:ls: cannot access '/var/lib/ceph': No such file or directory 2026-04-01T02:20:51.386 INFO:teuthology.orchestra.run.vm06.stderr:ls: cannot access '/var/lib/ceph': No such file or directory 2026-04-01T02:20:51.417 INFO:teuthology.orchestra.run.vm08.stderr:ls: cannot access '/var/lib/ceph': No such file or directory 2026-04-01T02:20:51.418 INFO:teuthology.run_tasks:Running task internal.vm_setup... 2026-04-01T02:20:51.425 DEBUG:teuthology.orchestra.run.vm03:> test -e /ceph-qa-ready 2026-04-01T02:20:51.438 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:20:51.647 DEBUG:teuthology.orchestra.run.vm06:> test -e /ceph-qa-ready 2026-04-01T02:20:51.664 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:20:51.856 DEBUG:teuthology.orchestra.run.vm08:> test -e /ceph-qa-ready 2026-04-01T02:20:51.871 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:20:52.059 INFO:teuthology.run_tasks:Running task internal.base... 2026-04-01T02:20:52.060 INFO:teuthology.task.internal:Creating test directory... 2026-04-01T02:20:52.061 DEBUG:teuthology.orchestra.run.vm03:> mkdir -p -m0755 -- /home/ubuntu/cephtest 2026-04-01T02:20:52.062 DEBUG:teuthology.orchestra.run.vm06:> mkdir -p -m0755 -- /home/ubuntu/cephtest 2026-04-01T02:20:52.064 DEBUG:teuthology.orchestra.run.vm08:> mkdir -p -m0755 -- /home/ubuntu/cephtest 2026-04-01T02:20:52.079 INFO:teuthology.run_tasks:Running task internal.archive_upload... 2026-04-01T02:20:52.080 INFO:teuthology.run_tasks:Running task internal.archive... 2026-04-01T02:20:52.081 INFO:teuthology.task.internal:Creating archive directory... 
2026-04-01T02:20:52.081 DEBUG:teuthology.orchestra.run.vm03:> install -d -m0755 -- /home/ubuntu/cephtest/archive 2026-04-01T02:20:52.119 DEBUG:teuthology.orchestra.run.vm06:> install -d -m0755 -- /home/ubuntu/cephtest/archive 2026-04-01T02:20:52.122 DEBUG:teuthology.orchestra.run.vm08:> install -d -m0755 -- /home/ubuntu/cephtest/archive 2026-04-01T02:20:52.141 INFO:teuthology.run_tasks:Running task internal.coredump... 2026-04-01T02:20:52.143 INFO:teuthology.task.internal:Enabling coredump saving... 2026-04-01T02:20:52.143 DEBUG:teuthology.orchestra.run.vm03:> test -f /run/.containerenv -o -f /.dockerenv 2026-04-01T02:20:52.188 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:20:52.188 DEBUG:teuthology.orchestra.run.vm06:> test -f /run/.containerenv -o -f /.dockerenv 2026-04-01T02:20:52.203 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:20:52.203 DEBUG:teuthology.orchestra.run.vm08:> test -f /run/.containerenv -o -f /.dockerenv 2026-04-01T02:20:52.218 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:20:52.218 DEBUG:teuthology.orchestra.run.vm03:> install -d -m0755 -- /home/ubuntu/cephtest/archive/coredump && sudo sysctl -w kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core && echo kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core | sudo tee -a /etc/sysctl.conf 2026-04-01T02:20:52.230 DEBUG:teuthology.orchestra.run.vm06:> install -d -m0755 -- /home/ubuntu/cephtest/archive/coredump && sudo sysctl -w kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core && echo kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core | sudo tee -a /etc/sysctl.conf 2026-04-01T02:20:52.245 DEBUG:teuthology.orchestra.run.vm08:> install -d -m0755 -- /home/ubuntu/cephtest/archive/coredump && sudo sysctl -w kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core && echo kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core | sudo tee -a /etc/sysctl.conf 2026-04-01T02:20:52.253 INFO:teuthology.orchestra.run.vm03.stdout:kernel.core_pattern = /home/ubuntu/cephtest/archive/coredump/%t.%p.core 2026-04-01T02:20:52.263 INFO:teuthology.orchestra.run.vm03.stdout:kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core 2026-04-01T02:20:52.269 INFO:teuthology.orchestra.run.vm06.stdout:kernel.core_pattern = /home/ubuntu/cephtest/archive/coredump/%t.%p.core 2026-04-01T02:20:52.278 INFO:teuthology.orchestra.run.vm06.stdout:kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core 2026-04-01T02:20:52.285 INFO:teuthology.orchestra.run.vm08.stdout:kernel.core_pattern = /home/ubuntu/cephtest/archive/coredump/%t.%p.core 2026-04-01T02:20:52.294 INFO:teuthology.orchestra.run.vm08.stdout:kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core 2026-04-01T02:20:52.295 INFO:teuthology.run_tasks:Running task internal.sudo... 2026-04-01T02:20:52.296 INFO:teuthology.task.internal:Configuring sudo... 
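The internal.coredump step above routes kernel core dumps into the per-job archive; in core(5) terms, %t is the dump time in seconds since the epoch and %p is the PID of the dumping process. A quick check that the pattern took effect, e.g. when replaying this setup on a scratch host:

    # Sketch: confirm the core_pattern written by internal.coredump (path from the log).
    expected = "/home/ubuntu/cephtest/archive/coredump/%t.%p.core"
    with open("/proc/sys/kernel/core_pattern") as f:
        current = f.read().strip()
    print("core_pattern ok" if current == expected else f"unexpected core_pattern: {current}")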
2026-04-01T02:20:52.296 DEBUG:teuthology.orchestra.run.vm03:> sudo sed -i.orig.teuthology -e 's/^\([^#]*\) \(requiretty\)/\1 !\2/g' -e 's/^\([^#]*\) !\(visiblepw\)/\1 \2/g' /etc/sudoers 2026-04-01T02:20:52.307 DEBUG:teuthology.orchestra.run.vm06:> sudo sed -i.orig.teuthology -e 's/^\([^#]*\) \(requiretty\)/\1 !\2/g' -e 's/^\([^#]*\) !\(visiblepw\)/\1 \2/g' /etc/sudoers 2026-04-01T02:20:52.321 DEBUG:teuthology.orchestra.run.vm08:> sudo sed -i.orig.teuthology -e 's/^\([^#]*\) \(requiretty\)/\1 !\2/g' -e 's/^\([^#]*\) !\(visiblepw\)/\1 \2/g' /etc/sudoers 2026-04-01T02:20:52.358 INFO:teuthology.run_tasks:Running task internal.syslog... 2026-04-01T02:20:52.360 INFO:teuthology.task.internal.syslog:Starting syslog monitoring... 2026-04-01T02:20:52.361 DEBUG:teuthology.orchestra.run.vm03:> mkdir -p -m0755 -- /home/ubuntu/cephtest/archive/syslog 2026-04-01T02:20:52.373 DEBUG:teuthology.orchestra.run.vm06:> mkdir -p -m0755 -- /home/ubuntu/cephtest/archive/syslog 2026-04-01T02:20:52.384 DEBUG:teuthology.orchestra.run.vm08:> mkdir -p -m0755 -- /home/ubuntu/cephtest/archive/syslog 2026-04-01T02:20:52.412 DEBUG:teuthology.orchestra.run.vm03:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/kern.log 2026-04-01T02:20:52.448 DEBUG:teuthology.orchestra.run.vm03:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/misc.log 2026-04-01T02:20:52.503 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:20:52.503 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/etc/rsyslog.d/80-cephtest.conf 2026-04-01T02:20:52.560 DEBUG:teuthology.orchestra.run.vm06:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/kern.log 2026-04-01T02:20:52.581 DEBUG:teuthology.orchestra.run.vm06:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/misc.log 2026-04-01T02:20:52.635 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:20:52.635 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/etc/rsyslog.d/80-cephtest.conf 2026-04-01T02:20:52.691 DEBUG:teuthology.orchestra.run.vm08:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/kern.log 2026-04-01T02:20:52.713 DEBUG:teuthology.orchestra.run.vm08:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/misc.log 2026-04-01T02:20:52.770 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:20:52.770 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/etc/rsyslog.d/80-cephtest.conf 2026-04-01T02:20:52.834 DEBUG:teuthology.orchestra.run.vm03:> sudo service rsyslog restart 2026-04-01T02:20:52.836 DEBUG:teuthology.orchestra.run.vm06:> sudo service rsyslog restart 2026-04-01T02:20:52.838 DEBUG:teuthology.orchestra.run.vm08:> sudo service rsyslog restart 2026-04-01T02:20:52.862 INFO:teuthology.orchestra.run.vm03.stderr:Redirecting to /bin/systemctl restart rsyslog.service 2026-04-01T02:20:52.862 INFO:teuthology.orchestra.run.vm06.stderr:Redirecting to /bin/systemctl restart rsyslog.service 2026-04-01T02:20:52.901 INFO:teuthology.orchestra.run.vm08.stderr:Redirecting to /bin/systemctl restart rsyslog.service 2026-04-01T02:20:53.198 INFO:teuthology.run_tasks:Running task internal.timer... 2026-04-01T02:20:53.200 INFO:teuthology.task.internal:Starting timer... 2026-04-01T02:20:53.200 INFO:teuthology.run_tasks:Running task pcp... 2026-04-01T02:20:53.214 INFO:teuthology.run_tasks:Running task selinux... 
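The internal.sudo step edits /etc/sudoers so the non-interactive SSH sessions used by the harness can run sudo: requiretty is negated and !visiblepw un-negated. The two sed substitutions shown above, restated in Python on illustrative sample lines:

    # Sketch: effect of the two sed expressions run by internal.sudo on /etc/sudoers lines.
    import re

    def fix_sudoers_line(line: str) -> str:
        line = re.sub(r"^([^#]*) (requiretty)", r"\1 !\2", line)   # requiretty -> !requiretty
        line = re.sub(r"^([^#]*) !(visiblepw)", r"\1 \2", line)    # !visiblepw -> visiblepw
        return line

    print(fix_sudoers_line("Defaults requiretty"))   # Defaults !requiretty
    print(fix_sudoers_line("Defaults !visiblepw"))   # Defaults visiblepw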
2026-04-01T02:20:53.216 DEBUG:teuthology.task:Applying overrides for task selinux: {'allowlist': ['scontext=system_u:system_r:getty_t:s0']} 2026-04-01T02:20:53.216 INFO:teuthology.task.selinux:Excluding vm03: VMs are not yet supported 2026-04-01T02:20:53.216 INFO:teuthology.task.selinux:Excluding vm06: VMs are not yet supported 2026-04-01T02:20:53.216 INFO:teuthology.task.selinux:Excluding vm08: VMs are not yet supported 2026-04-01T02:20:53.216 DEBUG:teuthology.task.selinux:Getting current SELinux state 2026-04-01T02:20:53.216 DEBUG:teuthology.task.selinux:Existing SELinux modes: {} 2026-04-01T02:20:53.216 INFO:teuthology.task.selinux:Putting SELinux into permissive mode 2026-04-01T02:20:53.216 INFO:teuthology.run_tasks:Running task ansible.cephlab... 2026-04-01T02:20:53.228 DEBUG:teuthology.task:Applying overrides for task ansible.cephlab: {'branch': 'main', 'repo': 'https://github.com/kshtsk/ceph-cm-ansible.git', 'skip_tags': 'nagios,monitoring-scripts,hostname,pubkeys,zap,sudoers,kerberos,ntp-client,resolvconf,cpan,nfs', 'vars': {'logical_volumes': {'lv_1': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_2': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_3': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_4': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}}, 'timezone': 'UTC', 'volume_groups': {'vg_nvme': {'pvs': '/dev/vdb,/dev/vdc,/dev/vdd,/dev/vde'}}}} 2026-04-01T02:20:53.228 DEBUG:teuthology.repo_utils:Setting repo remote to https://github.com/kshtsk/ceph-cm-ansible.git 2026-04-01T02:20:53.229 INFO:teuthology.repo_utils:Fetching github.com_kshtsk_ceph-cm-ansible_main from origin 2026-04-01T02:20:53.812 DEBUG:teuthology.repo_utils:Resetting repo at /home/teuthos/src/github.com_kshtsk_ceph-cm-ansible_main to origin/main 2026-04-01T02:20:53.829 INFO:teuthology.task.ansible:Playbook: [{'import_playbook': 'ansible_managed.yml'}, {'import_playbook': 'teuthology.yml'}, {'hosts': 'testnodes', 'tasks': [{'set_fact': {'ran_from_cephlab_playbook': True}}]}, {'import_playbook': 'testnodes.yml'}, {'import_playbook': 'container-host.yml'}, {'import_playbook': 'cobbler.yml'}, {'import_playbook': 'paddles.yml'}, {'import_playbook': 'pulpito.yml'}, {'hosts': 'testnodes', 'become': True, 'tasks': [{'name': 'Touch /ceph-qa-ready', 'file': {'path': '/ceph-qa-ready', 'state': 'touch'}, 'when': 'ran_from_cephlab_playbook|bool'}]}] 2026-04-01T02:20:53.830 DEBUG:teuthology.task.ansible:Running ansible-playbook -v --extra-vars '{"ansible_ssh_user": "ubuntu", "logical_volumes": {"lv_1": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}, "lv_2": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}, "lv_3": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}, "lv_4": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}}, "timezone": "UTC", "volume_groups": {"vg_nvme": {"pvs": "/dev/vdb,/dev/vdc,/dev/vdd,/dev/vde"}}}' -i /tmp/teuth_ansible_inventory_0mr2v95 --limit vm03.local,vm06.local,vm08.local /home/teuthos/src/github.com_kshtsk_ceph-cm-ansible_main/cephlab.yml --skip-tags nagios,monitoring-scripts,hostname,pubkeys,zap,sudoers,kerberos,ntp-client,resolvconf,cpan,nfs 2026-04-01T02:22:57.751 DEBUG:teuthology.task.ansible:Reconnecting to [Remote(name='ubuntu@vm03.local'), Remote(name='ubuntu@vm06.local'), Remote(name='ubuntu@vm08.local')] 2026-04-01T02:22:57.751 INFO:teuthology.orchestra.remote:Trying to reconnect to host 'ubuntu@vm03.local' 2026-04-01T02:22:57.752 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm03.local', 'username': 
'ubuntu', 'timeout': 60} 2026-04-01T02:22:57.816 DEBUG:teuthology.orchestra.run.vm03:> true 2026-04-01T02:22:57.894 INFO:teuthology.orchestra.remote:Successfully reconnected to host 'ubuntu@vm03.local' 2026-04-01T02:22:57.894 INFO:teuthology.orchestra.remote:Trying to reconnect to host 'ubuntu@vm06.local' 2026-04-01T02:22:57.895 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm06.local', 'username': 'ubuntu', 'timeout': 60} 2026-04-01T02:22:57.962 DEBUG:teuthology.orchestra.run.vm06:> true 2026-04-01T02:22:58.045 INFO:teuthology.orchestra.remote:Successfully reconnected to host 'ubuntu@vm06.local' 2026-04-01T02:22:58.045 INFO:teuthology.orchestra.remote:Trying to reconnect to host 'ubuntu@vm08.local' 2026-04-01T02:22:58.045 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm08.local', 'username': 'ubuntu', 'timeout': 60} 2026-04-01T02:22:58.107 DEBUG:teuthology.orchestra.run.vm08:> true 2026-04-01T02:22:58.179 INFO:teuthology.orchestra.remote:Successfully reconnected to host 'ubuntu@vm08.local' 2026-04-01T02:22:58.179 INFO:teuthology.run_tasks:Running task clock... 2026-04-01T02:22:58.181 INFO:teuthology.task.clock:Syncing clocks and checking initial clock skew... 2026-04-01T02:22:58.181 INFO:teuthology.orchestra.run:Running command with timeout 360 2026-04-01T02:22:58.181 DEBUG:teuthology.orchestra.run.vm03:> sudo systemctl stop ntp.service || sudo systemctl stop ntpd.service || sudo systemctl stop chronyd.service ; sudo ntpd -gq || sudo chronyc makestep ; sudo systemctl start ntp.service || sudo systemctl start ntpd.service || sudo systemctl start chronyd.service ; PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true 2026-04-01T02:22:58.183 INFO:teuthology.orchestra.run:Running command with timeout 360 2026-04-01T02:22:58.183 DEBUG:teuthology.orchestra.run.vm06:> sudo systemctl stop ntp.service || sudo systemctl stop ntpd.service || sudo systemctl stop chronyd.service ; sudo ntpd -gq || sudo chronyc makestep ; sudo systemctl start ntp.service || sudo systemctl start ntpd.service || sudo systemctl start chronyd.service ; PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true 2026-04-01T02:22:58.184 INFO:teuthology.orchestra.run:Running command with timeout 360 2026-04-01T02:22:58.184 DEBUG:teuthology.orchestra.run.vm08:> sudo systemctl stop ntp.service || sudo systemctl stop ntpd.service || sudo systemctl stop chronyd.service ; sudo ntpd -gq || sudo chronyc makestep ; sudo systemctl start ntp.service || sudo systemctl start ntpd.service || sudo systemctl start chronyd.service ; PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true 2026-04-01T02:22:58.218 INFO:teuthology.orchestra.run.vm06.stderr:Failed to stop ntp.service: Unit ntp.service not loaded. 2026-04-01T02:22:58.218 INFO:teuthology.orchestra.run.vm03.stderr:Failed to stop ntp.service: Unit ntp.service not loaded. 2026-04-01T02:22:58.236 INFO:teuthology.orchestra.run.vm06.stderr:Failed to stop ntpd.service: Unit ntpd.service not loaded. 2026-04-01T02:22:58.237 INFO:teuthology.orchestra.run.vm03.stderr:Failed to stop ntpd.service: Unit ntpd.service not loaded. 2026-04-01T02:22:58.255 INFO:teuthology.orchestra.run.vm08.stderr:Failed to stop ntp.service: Unit ntp.service not loaded. 
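With no ntpd installed on these Rocky 9 VMs, the clock task falls back to chronyc makestep and then lists the sources; in the chronyc sources output further down every server is still shown as '^?' with Reach 0, i.e. not yet polled successfully. A small sketch for flagging that when scanning such output:

    # Sketch: pick out chrony sources that have not been reached yet (Reach column == 0),
    # using the `chronyc sources` column layout shown below.
    def unreachable_sources(output: str) -> list[str]:
        bad = []
        for line in output.splitlines():
            parts = line.split()
            # data rows start with a mode character (^, = or #) plus a state character
            if parts and parts[0][0] in "^=#" and len(parts) >= 5 and parts[4] == "0":
                bad.append(parts[1])
        return bad

    print(unreachable_sources("^? hfu.ovh 0 6 0 - +0ns[ +0ns] +/- 0ns"))  # ['hfu.ovh']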
2026-04-01T02:22:58.264 INFO:teuthology.orchestra.run.vm06.stderr:sudo: ntpd: command not found 2026-04-01T02:22:58.268 INFO:teuthology.orchestra.run.vm03.stderr:sudo: ntpd: command not found 2026-04-01T02:22:58.276 INFO:teuthology.orchestra.run.vm08.stderr:Failed to stop ntpd.service: Unit ntpd.service not loaded. 2026-04-01T02:22:58.279 INFO:teuthology.orchestra.run.vm06.stdout:506 Cannot talk to daemon 2026-04-01T02:22:58.280 INFO:teuthology.orchestra.run.vm03.stdout:506 Cannot talk to daemon 2026-04-01T02:22:58.294 INFO:teuthology.orchestra.run.vm03.stderr:Failed to start ntp.service: Unit ntp.service not found. 2026-04-01T02:22:58.297 INFO:teuthology.orchestra.run.vm06.stderr:Failed to start ntp.service: Unit ntp.service not found. 2026-04-01T02:22:58.306 INFO:teuthology.orchestra.run.vm08.stderr:sudo: ntpd: command not found 2026-04-01T02:22:58.310 INFO:teuthology.orchestra.run.vm03.stderr:Failed to start ntpd.service: Unit ntpd.service not found. 2026-04-01T02:22:58.313 INFO:teuthology.orchestra.run.vm06.stderr:Failed to start ntpd.service: Unit ntpd.service not found. 2026-04-01T02:22:58.319 INFO:teuthology.orchestra.run.vm08.stdout:506 Cannot talk to daemon 2026-04-01T02:22:58.336 INFO:teuthology.orchestra.run.vm08.stderr:Failed to start ntp.service: Unit ntp.service not found. 2026-04-01T02:22:58.352 INFO:teuthology.orchestra.run.vm08.stderr:Failed to start ntpd.service: Unit ntpd.service not found. 2026-04-01T02:22:58.368 INFO:teuthology.orchestra.run.vm03.stderr:bash: line 1: ntpq: command not found 2026-04-01T02:22:58.368 INFO:teuthology.orchestra.run.vm06.stderr:bash: line 1: ntpq: command not found 2026-04-01T02:22:58.411 INFO:teuthology.orchestra.run.vm08.stderr:bash: line 1: ntpq: command not found 2026-04-01T02:22:58.500 INFO:teuthology.orchestra.run.vm08.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm08.stdout:=============================================================================== 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm08.stdout:^? cloudrouter.1in1.net 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm08.stdout:^? netcup01.theravenhub.com 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm08.stdout:^? hfu.ovh 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm08.stdout:^? static-217-115-11-162.in> 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm06.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm06.stdout:=============================================================================== 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm06.stdout:^? netcup01.theravenhub.com 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm06.stdout:^? hfu.ovh 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm06.stdout:^? static-217-115-11-162.in> 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm06.stdout:^? 
cloudrouter.1in1.net 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm03.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm03.stdout:=============================================================================== 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm03.stdout:^? cloudrouter.1in1.net 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm03.stdout:^? netcup01.theravenhub.com 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm03.stdout:^? hfu.ovh 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.501 INFO:teuthology.orchestra.run.vm03.stdout:^? static-217-115-11-162.in> 0 6 0 - +0ns[ +0ns] +/- 0ns 2026-04-01T02:22:58.502 INFO:teuthology.run_tasks:Running task install... 2026-04-01T02:22:58.503 DEBUG:teuthology.task.install:project ceph 2026-04-01T02:22:58.503 DEBUG:teuthology.task.install:INSTALL overrides: {'ceph': {'flavor': 'default', 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce'}, 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'repos': [{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}]} 2026-04-01T02:22:58.503 DEBUG:teuthology.task.install:config {'flavor': 'default', 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce', 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}} 2026-04-01T02:22:58.503 INFO:teuthology.task.install:Using flavor: default 2026-04-01T02:22:58.506 DEBUG:teuthology.task.install:Package list is: {'deb': ['ceph', 'cephadm', 'ceph-mds', 'ceph-mgr', 'ceph-common', 'ceph-fuse', 'ceph-test', 'ceph-volume', 'radosgw', 'python3-rados', 'python3-rgw', 'python3-cephfs', 'python3-rbd', 'libcephfs2', 'libcephfs-dev', 'librados2', 'librbd1', 'rbd-fuse'], 'rpm': ['ceph-radosgw', 'ceph-test', 'ceph', 'ceph-base', 'cephadm', 'ceph-immutable-object-cache', 'ceph-mgr', 'ceph-mgr-dashboard', 'ceph-mgr-diskprediction-local', 'ceph-mgr-rook', 'ceph-mgr-cephadm', 'ceph-fuse', 'ceph-volume', 'librados-devel', 'libcephfs2', 'libcephfs-devel', 'librados2', 'librbd1', 'python3-rados', 'python3-rgw', 'python3-cephfs', 'python3-rbd', 'rbd-fuse', 'rbd-mirror', 'rbd-nbd']} 2026-04-01T02:22:58.506 INFO:teuthology.task.install:extra packages: [] 2026-04-01T02:22:58.506 DEBUG:teuthology.task.install.rpm:_update_package_list_and_install: config is {'branch': None, 'cleanup': None, 'debuginfo': None, 'downgrade_packages': [], 'exclude_packages': [], 'extra_packages': [], 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'extras': None, 'enable_coprs': [], 'flavor': 'default', 'install_ceph_packages': True, 'packages': {}, 'project': 'ceph', 'repos_only': False, 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce', 'tag': None, 'wait_for_package': False, 'repos': 
[{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}]} 2026-04-01T02:22:58.506 DEBUG:teuthology.task.install.rpm:Adding repos: [{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}] 2026-04-01T02:22:58.506 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:22:58.506 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/etc/yum.repos.d/ceph-source.repo 2026-04-01T02:22:58.506 DEBUG:teuthology.task.install.rpm:_update_package_list_and_install: config is {'branch': None, 'cleanup': None, 'debuginfo': None, 'downgrade_packages': [], 'exclude_packages': [], 'extra_packages': [], 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'extras': None, 'enable_coprs': [], 'flavor': 'default', 'install_ceph_packages': True, 'packages': {}, 'project': 'ceph', 'repos_only': False, 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce', 'tag': None, 'wait_for_package': False, 'repos': [{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}]} 2026-04-01T02:22:58.506 DEBUG:teuthology.task.install.rpm:Adding repos: [{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}] 2026-04-01T02:22:58.506 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:22:58.506 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/etc/yum.repos.d/ceph-source.repo 2026-04-01T02:22:58.507 DEBUG:teuthology.task.install.rpm:_update_package_list_and_install: config is {'branch': None, 'cleanup': None, 'debuginfo': None, 'downgrade_packages': [], 'exclude_packages': [], 'extra_packages': [], 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'extras': None, 'enable_coprs': [], 'flavor': 'default', 'install_ceph_packages': True, 'packages': {}, 'project': 'ceph', 'repos_only': False, 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce', 'tag': None, 'wait_for_package': False, 'repos': 
[{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}]} 2026-04-01T02:22:58.507 DEBUG:teuthology.task.install.rpm:Adding repos: [{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}] 2026-04-01T02:22:58.507 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:22:58.507 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/etc/yum.repos.d/ceph-source.repo 2026-04-01T02:22:58.570 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:22:58.570 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/etc/yum.repos.d/ceph-noarch.repo 2026-04-01T02:22:58.573 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:22:58.573 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/etc/yum.repos.d/ceph-noarch.repo 2026-04-01T02:22:58.574 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:22:58.574 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/etc/yum.repos.d/ceph-noarch.repo 2026-04-01T02:22:58.639 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:22:58.639 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/etc/yum.repos.d/ceph.repo 2026-04-01T02:22:58.645 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:22:58.645 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/etc/yum.repos.d/ceph.repo 2026-04-01T02:22:58.645 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:22:58.645 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/etc/yum.repos.d/ceph.repo 2026-04-01T02:22:58.711 INFO:teuthology.task.install.rpm:Installing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd, bzip2, perl-Test-Harness, python3-jmespath, python3-xmltodict, s3cmd on remote rpm x86_64 2026-04-01T02:22:58.711 DEBUG:teuthology.orchestra.run.vm06:> sudo yum clean all 2026-04-01T02:22:58.717 INFO:teuthology.task.install.rpm:Installing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd, bzip2, perl-Test-Harness, python3-jmespath, python3-xmltodict, s3cmd on remote rpm x86_64 2026-04-01T02:22:58.717 DEBUG:teuthology.orchestra.run.vm03:> sudo yum clean all 2026-04-01T02:22:58.719 INFO:teuthology.task.install.rpm:Installing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, 
ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd, bzip2, perl-Test-Harness, python3-jmespath, python3-xmltodict, s3cmd on remote rpm x86_64 2026-04-01T02:22:58.719 DEBUG:teuthology.orchestra.run.vm08:> sudo yum clean all 2026-04-01T02:22:58.904 INFO:teuthology.orchestra.run.vm06.stdout:47 files removed 2026-04-01T02:22:58.907 INFO:teuthology.orchestra.run.vm08.stdout:47 files removed 2026-04-01T02:22:58.912 INFO:teuthology.orchestra.run.vm03.stdout:47 files removed 2026-04-01T02:22:58.929 DEBUG:teuthology.orchestra.run.vm08:> sudo yum -y install ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd bzip2 perl-Test-Harness python3-jmespath python3-xmltodict s3cmd 2026-04-01T02:22:58.935 DEBUG:teuthology.orchestra.run.vm06:> sudo yum -y install ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd bzip2 perl-Test-Harness python3-jmespath python3-xmltodict s3cmd 2026-04-01T02:22:58.944 DEBUG:teuthology.orchestra.run.vm03:> sudo yum -y install ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd bzip2 perl-Test-Harness python3-jmespath python3-xmltodict s3cmd 2026-04-01T02:22:59.337 INFO:teuthology.orchestra.run.vm03.stdout:ceph 398 kB/s | 90 kB 00:00 2026-04-01T02:22:59.347 INFO:teuthology.orchestra.run.vm08.stdout:ceph 395 kB/s | 90 kB 00:00 2026-04-01T02:22:59.348 INFO:teuthology.orchestra.run.vm06.stdout:ceph 392 kB/s | 90 kB 00:00 2026-04-01T02:22:59.780 INFO:teuthology.orchestra.run.vm03.stdout:ceph-noarch 60 kB/s | 25 kB 00:00 2026-04-01T02:22:59.787 INFO:teuthology.orchestra.run.vm08.stdout:ceph-noarch 60 kB/s | 25 kB 00:00 2026-04-01T02:22:59.825 INFO:teuthology.orchestra.run.vm06.stdout:ceph-noarch 55 kB/s | 25 kB 00:00 2026-04-01T02:22:59.972 INFO:teuthology.orchestra.run.vm03.stdout:ceph-source 13 kB/s | 2.3 kB 00:00 2026-04-01T02:22:59.977 INFO:teuthology.orchestra.run.vm08.stdout:ceph-source 14 kB/s | 2.3 kB 00:00 2026-04-01T02:23:00.015 INFO:teuthology.orchestra.run.vm06.stdout:ceph-source 14 kB/s | 2.3 kB 00:00 2026-04-01T02:23:02.631 INFO:teuthology.orchestra.run.vm03.stdout:Extra Packages for Enterprise Linux 7.7 MB/s | 20 MB 00:02 2026-04-01T02:23:02.686 INFO:teuthology.orchestra.run.vm08.stdout:Extra Packages for Enterprise Linux 7.6 MB/s | 20 MB 00:02 2026-04-01T02:23:03.398 INFO:teuthology.orchestra.run.vm06.stdout:Extra Packages for Enterprise Linux 6.1 MB/s | 20 MB 00:03 2026-04-01T02:23:07.621 INFO:teuthology.orchestra.run.vm08.stdout:lab-extras 65 kB/s | 50 kB 00:00 2026-04-01T02:23:07.622 INFO:teuthology.orchestra.run.vm03.stdout:lab-extras 64 kB/s | 50 kB 
00:00 2026-04-01T02:23:08.331 INFO:teuthology.orchestra.run.vm06.stdout:lab-extras 64 kB/s | 50 kB 00:00 2026-04-01T02:23:08.597 INFO:teuthology.orchestra.run.vm08.stdout:Rocky Linux 9 - BaseOS 19 MB/s | 17 MB 00:00 2026-04-01T02:23:08.598 INFO:teuthology.orchestra.run.vm03.stdout:Rocky Linux 9 - BaseOS 19 MB/s | 17 MB 00:00 2026-04-01T02:23:09.136 INFO:teuthology.orchestra.run.vm06.stdout:Rocky Linux 9 - BaseOS 24 MB/s | 17 MB 00:00 2026-04-01T02:23:10.530 INFO:teuthology.orchestra.run.vm08.stdout:Rocky Linux 9 - AppStream 24 MB/s | 17 MB 00:00 2026-04-01T02:23:10.669 INFO:teuthology.orchestra.run.vm03.stdout:Rocky Linux 9 - AppStream 24 MB/s | 17 MB 00:00 2026-04-01T02:23:11.164 INFO:teuthology.orchestra.run.vm06.stdout:Rocky Linux 9 - AppStream 23 MB/s | 17 MB 00:00 2026-04-01T02:23:12.911 INFO:teuthology.orchestra.run.vm08.stdout:Rocky Linux 9 - CRB 8.3 MB/s | 4.3 MB 00:00 2026-04-01T02:23:13.180 INFO:teuthology.orchestra.run.vm03.stdout:Rocky Linux 9 - CRB 8.2 MB/s | 4.3 MB 00:00 2026-04-01T02:23:13.564 INFO:teuthology.orchestra.run.vm06.stdout:Rocky Linux 9 - CRB 8.0 MB/s | 4.3 MB 00:00 2026-04-01T02:23:13.830 INFO:teuthology.orchestra.run.vm08.stdout:Rocky Linux 9 - Extras 50 kB/s | 17 kB 00:00 2026-04-01T02:23:14.126 INFO:teuthology.orchestra.run.vm03.stdout:Rocky Linux 9 - Extras 49 kB/s | 17 kB 00:00 2026-04-01T02:23:14.451 INFO:teuthology.orchestra.run.vm06.stdout:Rocky Linux 9 - Extras 53 kB/s | 17 kB 00:00 2026-04-01T02:23:15.167 INFO:teuthology.orchestra.run.vm08.stdout:Package librados2-2:16.2.4-5.el9.x86_64 is already installed. 2026-04-01T02:23:15.167 INFO:teuthology.orchestra.run.vm08.stdout:Package librbd1-2:16.2.4-5.el9.x86_64 is already installed. 2026-04-01T02:23:15.199 INFO:teuthology.orchestra.run.vm08.stdout:Dependencies resolved. 
2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout:============================================================================================= 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: Package Arch Version Repository Size 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout:============================================================================================= 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout:Installing: 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: bzip2 x86_64 1.0.8-10.el9_5 baseos 51 k 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 6.5 k 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-base x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 5.9 M 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-fuse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 940 k 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 154 k 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 961 k 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 173 k 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-dashboard noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 15 M 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 7.4 M 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-rook noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 50 k 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-radosgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 M 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-test x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 85 M 2026-04-01T02:23:15.206 INFO:teuthology.orchestra.run.vm08.stdout: ceph-volume noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 297 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 1.0 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs-devel x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 34 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 868 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: librados-devel x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 126 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: perl-Test-Harness noarch 1:3.42-461.el9 appstream 267 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: python3-cephfs x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 163 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: python3-jmespath noarch 1.0.1-1.el9_7 appstream 43 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: python3-rados x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 317 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: python3-rbd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 304 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: python3-rgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 99 k 
2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: python3-xmltodict noarch 0.12.0-15.el9 epel 22 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: rbd-fuse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 91 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: rbd-mirror x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.9 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: rbd-nbd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 180 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: s3cmd noarch 2.4.0-1.el9 epel 206 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout:Upgrading: 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: librados2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 3.5 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: librbd1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.8 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout:Installing dependencies: 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: abseil-cpp x86_64 20211102.0-4.el9 epel 551 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: boost-program-options x86_64 1.75.0-13.el9_7 appstream 104 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: c-ares x86_64 1.19.1-2.el9_4 baseos 110 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-grafana-dashboards noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 43 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mds x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.3 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-modules-core noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 289 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mon x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 5.0 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-osd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 17 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-prometheus-alerts noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 17 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ceph-selinux x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 25 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: cryptsetup x86_64 2.7.2-4.el9 baseos 310 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas x86_64 3.0.4-8.el9.0.1 appstream 30 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas-netlib x86_64 3.0.4-8.el9.0.1 appstream 3.0 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas-openblas-openmp x86_64 3.0.4-8.el9.0.1 appstream 15 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: fuse x86_64 2.9.9-17.el9 baseos 78 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: gperftools-libs x86_64 2.9.1-3.el9 epel 308 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: grpc-data noarch 1.46.7-10.el9 epel 19 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: ledmon-libs x86_64 1.1.0-3.el9 baseos 41 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: libarrow x86_64 9.0.0-15.el9 epel 4.4 M 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: 
libarrow-doc noarch 9.0.0-15.el9 epel 25 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs-proxy2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 k 2026-04-01T02:23:15.207 INFO:teuthology.orchestra.run.vm08.stdout: libcephsqlite x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 164 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libconfig x86_64 1.7.2-9.el9 baseos 71 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libgfortran x86_64 11.5.0-11.el9 baseos 794 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libnbd x86_64 1.20.3-4.el9 appstream 171 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: liboath x86_64 2.6.12-1.el9 epel 49 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libpmemobj x86_64 1.12.1-1.el9 appstream 159 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libquadmath x86_64 11.5.0-11.el9 baseos 184 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: librabbitmq x86_64 0.11.0-7.el9 appstream 44 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libradosstriper1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 250 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: librdkafka x86_64 1.6.1-102.el9 appstream 662 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: librgw2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 6.4 M 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libstoragemgmt x86_64 1.10.1-1.el9 appstream 243 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libunwind x86_64 1.6.2-1.el9 epel 67 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: libxslt x86_64 1.1.34-13.el9_6 appstream 239 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: lmdb-libs x86_64 0.9.29-3.el9 baseos 60 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: lttng-ust x86_64 2.12.0-6.el9 appstream 282 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: lua x86_64 5.4.4-4.el9 appstream 187 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: lua-devel x86_64 5.4.4-4.el9 crb 21 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: luarocks noarch 3.9.2-5.el9 epel 151 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: mailcap noarch 2.1.49-5.el9.0.2 baseos 32 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: openblas x86_64 0.3.29-1.el9 appstream 41 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: openblas-openmp x86_64 0.3.29-1.el9 appstream 5.3 M 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: parquet-libs x86_64 9.0.0-15.el9 epel 838 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: pciutils x86_64 3.7.0-7.el9 baseos 92 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: perl-Benchmark noarch 1.23-481.1.el9_6 appstream 25 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: protobuf x86_64 3.14.0-17.el9_7 appstream 1.0 M 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: protobuf-compiler x86_64 3.14.0-17.el9_7 crb 862 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-asyncssh noarch 2.13.2-5.el9 epel 548 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-autocommand noarch 2.2.2-8.el9 epel 29 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: 
python3-babel noarch 2.9.1-2.el9 appstream 5.8 M 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 epel 60 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-bcrypt x86_64 3.2.2-1.el9 epel 43 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-cachetools noarch 4.2.4-1.el9 epel 32 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-ceph-argparse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 45 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 163 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-certifi noarch 2023.05.07-4.el9 epel 14 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-cffi x86_64 1.14.5-5.el9 baseos 241 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-cheroot noarch 10.0.1-5.el9 epel 173 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-cherrypy noarch 18.10.0-5.el9 epel 290 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-cryptography x86_64 36.0.1-5.el9_6 baseos 1.2 M 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-devel x86_64 3.9.23-2.el9 appstream 205 k 2026-04-01T02:23:15.208 INFO:teuthology.orchestra.run.vm08.stdout: python3-google-auth noarch 1:2.45.0-1.el9 epel 254 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-grpcio x86_64 1.46.7-10.el9 epel 2.0 M 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 epel 144 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-isodate noarch 0.6.1-3.el9 epel 56 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco noarch 8.2.1-3.el9 epel 11 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 epel 18 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 epel 23 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-context noarch 6.0.1-3.el9 epel 20 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 epel 19 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-text noarch 4.0.0-2.el9 epel 26 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-jinja2 noarch 2.11.3-8.el9_5 appstream 228 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 epel 1.0 M 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 appstream 166 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-lxml x86_64 4.6.5-3.el9 appstream 1.2 M 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-markupsafe x86_64 1.1.1-12.el9 appstream 32 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-more-itertools noarch 8.12.0-2.el9 epel 79 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-msgpack x86_64 1.0.3-2.el9 epel 86 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-natsort noarch 7.1.1-5.el9 epel 58 k 2026-04-01T02:23:15.209 
INFO:teuthology.orchestra.run.vm08.stdout: python3-numpy x86_64 1:1.23.5-2.el9_7 appstream 5.8 M 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9_7 appstream 368 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-packaging noarch 20.9-5.el9 appstream 69 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-ply noarch 3.11-14.el9.0.1 baseos 103 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-portend noarch 3.1.0-2.el9 epel 16 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-protobuf noarch 3.14.0-17.el9_7 appstream 237 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 epel 90 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyasn1 noarch 0.4.8-7.el9_7 appstream 132 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9_7 appstream 210 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-pycparser noarch 2.20-6.el9 baseos 124 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyparsing noarch 2.4.7-9.el9.0.1 baseos 150 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-repoze-lru noarch 0.7-16.el9 epel 31 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-requests noarch 2.25.1-10.el9_6 baseos 115 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 appstream 43 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-routes noarch 2.5.1-5.el9 epel 188 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-rsa noarch 4.9-2.el9 epel 59 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-scipy x86_64 1.9.3-2.el9 appstream 19 M 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-tempora noarch 5.0.0-2.el9 epel 36 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-toml noarch 0.10.2-6.el9.0.1 appstream 44 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-typing-extensions noarch 4.15.0-1.el9 epel 86 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-urllib3 noarch 1.26.5-6.el9_7.1 baseos 191 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-websocket-client noarch 1.2.3-2.el9 epel 90 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-xmlsec x86_64 1.3.13-1.el9 epel 48 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: python3-zc-lockfile noarch 2.0-10.el9 epel 20 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: qatlib x86_64 24.09.0-1.el9 appstream 221 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: qatzip-libs x86_64 1.3.1-1.el9 appstream 65 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: re2 x86_64 1:20211101-20.el9 epel 191 k 2026-04-01T02:23:15.209 INFO:teuthology.orchestra.run.vm08.stdout: socat x86_64 1.7.4.1-8.el9 appstream 299 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: thrift x86_64 0.15.0-4.el9 epel 1.6 M 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: unzip x86_64 6.0-59.el9 baseos 180 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: xmlsec1 x86_64 1.2.29-13.el9 
appstream 188 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 appstream 89 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: xmlstarlet x86_64 1.6.1-20.el9 appstream 63 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: zip x86_64 3.0-35.el9 baseos 263 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout:Installing weak dependencies: 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 22 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs-daemon x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 35 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: nvme-cli x86_64 2.13-1.el9 baseos 1.0 M 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: python3-influxdb noarch 5.3.1-1.el9 epel 139 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: python3-saml noarch 1.16.0-1.el9 epel 125 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: qatlib-service x86_64 24.09.0-1.el9 appstream 36 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: smartmontools x86_64 1:7.2-9.el9 baseos 551 k 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout:Transaction Summary 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout:============================================================================================= 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout:Install 150 Packages 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout:Upgrade 2 Packages 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout:Total download size: 274 M 2026-04-01T02:23:15.210 INFO:teuthology.orchestra.run.vm08.stdout:Downloading Packages: 2026-04-01T02:23:15.493 INFO:teuthology.orchestra.run.vm03.stdout:Package librados2-2:16.2.4-5.el9.x86_64 is already installed. 2026-04-01T02:23:15.494 INFO:teuthology.orchestra.run.vm03.stdout:Package librbd1-2:16.2.4-5.el9.x86_64 is already installed. 2026-04-01T02:23:15.522 INFO:teuthology.orchestra.run.vm03.stdout:Dependencies resolved. 
2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout:============================================================================================= 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: Package Arch Version Repository Size 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout:============================================================================================= 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout:Installing: 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: bzip2 x86_64 1.0.8-10.el9_5 baseos 51 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 6.5 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-base x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 5.9 M 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-fuse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 940 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 154 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 961 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 173 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-dashboard noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 15 M 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 7.4 M 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-rook noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 50 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-radosgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 M 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-test x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 85 M 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: ceph-volume noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 297 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 1.0 M 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs-devel x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 34 k 2026-04-01T02:23:15.528 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 868 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: librados-devel x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 126 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: perl-Test-Harness noarch 1:3.42-461.el9 appstream 267 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: python3-cephfs x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 163 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: python3-jmespath noarch 1.0.1-1.el9_7 appstream 43 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: python3-rados x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 317 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: python3-rbd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 304 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: python3-rgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 99 k 
2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: python3-xmltodict noarch 0.12.0-15.el9 epel 22 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: rbd-fuse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 91 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: rbd-mirror x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.9 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: rbd-nbd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 180 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: s3cmd noarch 2.4.0-1.el9 epel 206 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout:Upgrading: 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: librados2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 3.5 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: librbd1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.8 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout:Installing dependencies: 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: abseil-cpp x86_64 20211102.0-4.el9 epel 551 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: boost-program-options x86_64 1.75.0-13.el9_7 appstream 104 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: c-ares x86_64 1.19.1-2.el9_4 baseos 110 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-grafana-dashboards noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 43 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mds x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.3 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-modules-core noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 289 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mon x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 5.0 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-osd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 17 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-prometheus-alerts noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 17 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ceph-selinux x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 25 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: cryptsetup x86_64 2.7.2-4.el9 baseos 310 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas x86_64 3.0.4-8.el9.0.1 appstream 30 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas-netlib x86_64 3.0.4-8.el9.0.1 appstream 3.0 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas-openblas-openmp x86_64 3.0.4-8.el9.0.1 appstream 15 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: fuse x86_64 2.9.9-17.el9 baseos 78 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: gperftools-libs x86_64 2.9.1-3.el9 epel 308 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: grpc-data noarch 1.46.7-10.el9 epel 19 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: ledmon-libs x86_64 1.1.0-3.el9 baseos 41 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libarrow x86_64 9.0.0-15.el9 epel 4.4 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: 
libarrow-doc noarch 9.0.0-15.el9 epel 25 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs-proxy2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libcephsqlite x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 164 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libconfig x86_64 1.7.2-9.el9 baseos 71 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libgfortran x86_64 11.5.0-11.el9 baseos 794 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libnbd x86_64 1.20.3-4.el9 appstream 171 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: liboath x86_64 2.6.12-1.el9 epel 49 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libpmemobj x86_64 1.12.1-1.el9 appstream 159 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libquadmath x86_64 11.5.0-11.el9 baseos 184 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: librabbitmq x86_64 0.11.0-7.el9 appstream 44 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libradosstriper1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 250 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: librdkafka x86_64 1.6.1-102.el9 appstream 662 k 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: librgw2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 6.4 M 2026-04-01T02:23:15.529 INFO:teuthology.orchestra.run.vm03.stdout: libstoragemgmt x86_64 1.10.1-1.el9 appstream 243 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: libunwind x86_64 1.6.2-1.el9 epel 67 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: libxslt x86_64 1.1.34-13.el9_6 appstream 239 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: lmdb-libs x86_64 0.9.29-3.el9 baseos 60 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: lttng-ust x86_64 2.12.0-6.el9 appstream 282 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: lua x86_64 5.4.4-4.el9 appstream 187 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: lua-devel x86_64 5.4.4-4.el9 crb 21 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: luarocks noarch 3.9.2-5.el9 epel 151 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: mailcap noarch 2.1.49-5.el9.0.2 baseos 32 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: openblas x86_64 0.3.29-1.el9 appstream 41 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: openblas-openmp x86_64 0.3.29-1.el9 appstream 5.3 M 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: parquet-libs x86_64 9.0.0-15.el9 epel 838 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: pciutils x86_64 3.7.0-7.el9 baseos 92 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: perl-Benchmark noarch 1.23-481.1.el9_6 appstream 25 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: protobuf x86_64 3.14.0-17.el9_7 appstream 1.0 M 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: protobuf-compiler x86_64 3.14.0-17.el9_7 crb 862 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-asyncssh noarch 2.13.2-5.el9 epel 548 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-autocommand noarch 2.2.2-8.el9 epel 29 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: 
python3-babel noarch 2.9.1-2.el9 appstream 5.8 M 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 epel 60 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-bcrypt x86_64 3.2.2-1.el9 epel 43 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-cachetools noarch 4.2.4-1.el9 epel 32 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-ceph-argparse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 45 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 163 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-certifi noarch 2023.05.07-4.el9 epel 14 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-cffi x86_64 1.14.5-5.el9 baseos 241 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-cheroot noarch 10.0.1-5.el9 epel 173 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-cherrypy noarch 18.10.0-5.el9 epel 290 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-cryptography x86_64 36.0.1-5.el9_6 baseos 1.2 M 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-devel x86_64 3.9.23-2.el9 appstream 205 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-google-auth noarch 1:2.45.0-1.el9 epel 254 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-grpcio x86_64 1.46.7-10.el9 epel 2.0 M 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 epel 144 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-isodate noarch 0.6.1-3.el9 epel 56 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco noarch 8.2.1-3.el9 epel 11 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 epel 18 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 epel 23 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-context noarch 6.0.1-3.el9 epel 20 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 epel 19 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-text noarch 4.0.0-2.el9 epel 26 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-jinja2 noarch 2.11.3-8.el9_5 appstream 228 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 epel 1.0 M 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 appstream 166 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-lxml x86_64 4.6.5-3.el9 appstream 1.2 M 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-markupsafe x86_64 1.1.1-12.el9 appstream 32 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-more-itertools noarch 8.12.0-2.el9 epel 79 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-msgpack x86_64 1.0.3-2.el9 epel 86 k 2026-04-01T02:23:15.530 INFO:teuthology.orchestra.run.vm03.stdout: python3-natsort noarch 7.1.1-5.el9 epel 58 k 2026-04-01T02:23:15.531 
INFO:teuthology.orchestra.run.vm03.stdout: python3-numpy x86_64 1:1.23.5-2.el9_7 appstream 5.8 M 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9_7 appstream 368 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-packaging noarch 20.9-5.el9 appstream 69 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-ply noarch 3.11-14.el9.0.1 baseos 103 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-portend noarch 3.1.0-2.el9 epel 16 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-protobuf noarch 3.14.0-17.el9_7 appstream 237 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 epel 90 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyasn1 noarch 0.4.8-7.el9_7 appstream 132 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9_7 appstream 210 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-pycparser noarch 2.20-6.el9 baseos 124 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyparsing noarch 2.4.7-9.el9.0.1 baseos 150 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-repoze-lru noarch 0.7-16.el9 epel 31 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-requests noarch 2.25.1-10.el9_6 baseos 115 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 appstream 43 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-routes noarch 2.5.1-5.el9 epel 188 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-rsa noarch 4.9-2.el9 epel 59 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-scipy x86_64 1.9.3-2.el9 appstream 19 M 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-tempora noarch 5.0.0-2.el9 epel 36 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-toml noarch 0.10.2-6.el9.0.1 appstream 44 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-typing-extensions noarch 4.15.0-1.el9 epel 86 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-urllib3 noarch 1.26.5-6.el9_7.1 baseos 191 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-websocket-client noarch 1.2.3-2.el9 epel 90 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-xmlsec x86_64 1.3.13-1.el9 epel 48 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-zc-lockfile noarch 2.0-10.el9 epel 20 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: qatlib x86_64 24.09.0-1.el9 appstream 221 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: qatzip-libs x86_64 1.3.1-1.el9 appstream 65 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: re2 x86_64 1:20211101-20.el9 epel 191 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: socat x86_64 1.7.4.1-8.el9 appstream 299 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: thrift x86_64 0.15.0-4.el9 epel 1.6 M 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: unzip x86_64 6.0-59.el9 baseos 180 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: xmlsec1 x86_64 1.2.29-13.el9 
appstream 188 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 appstream 89 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: xmlstarlet x86_64 1.6.1-20.el9 appstream 63 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: zip x86_64 3.0-35.el9 baseos 263 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout:Installing weak dependencies: 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 22 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs-daemon x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 35 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: nvme-cli x86_64 2.13-1.el9 baseos 1.0 M 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-influxdb noarch 5.3.1-1.el9 epel 139 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: python3-saml noarch 1.16.0-1.el9 epel 125 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: qatlib-service x86_64 24.09.0-1.el9 appstream 36 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: smartmontools x86_64 1:7.2-9.el9 baseos 551 k 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout:Transaction Summary 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout:============================================================================================= 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout:Install 150 Packages 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout:Upgrade 2 Packages 2026-04-01T02:23:15.531 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:15.532 INFO:teuthology.orchestra.run.vm03.stdout:Total download size: 274 M 2026-04-01T02:23:15.532 INFO:teuthology.orchestra.run.vm03.stdout:Downloading Packages: 2026-04-01T02:23:15.692 INFO:teuthology.orchestra.run.vm06.stdout:Package librados2-2:16.2.4-5.el9.x86_64 is already installed. 2026-04-01T02:23:15.693 INFO:teuthology.orchestra.run.vm06.stdout:Package librbd1-2:16.2.4-5.el9.x86_64 is already installed. 2026-04-01T02:23:15.716 INFO:teuthology.orchestra.run.vm06.stdout:Dependencies resolved. 
2026-04-01T02:23:15.721 INFO:teuthology.orchestra.run.vm06.stdout:============================================================================================= 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: Package Arch Version Repository Size 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout:============================================================================================= 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout:Installing: 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: bzip2 x86_64 1.0.8-10.el9_5 baseos 51 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 6.5 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-base x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 5.9 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-fuse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 940 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 154 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 961 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 173 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-dashboard noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 15 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 7.4 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-rook noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 50 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-radosgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-test x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 85 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: ceph-volume noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 297 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 1.0 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs-devel x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 34 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 868 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: librados-devel x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 126 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: perl-Test-Harness noarch 1:3.42-461.el9 appstream 267 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: python3-cephfs x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 163 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: python3-jmespath noarch 1.0.1-1.el9_7 appstream 43 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: python3-rados x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 317 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: python3-rbd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 304 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: python3-rgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 99 k 
2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: python3-xmltodict noarch 0.12.0-15.el9 epel 22 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: rbd-fuse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 91 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: rbd-mirror x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.9 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: rbd-nbd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 180 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: s3cmd noarch 2.4.0-1.el9 epel 206 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout:Upgrading: 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: librados2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 3.5 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: librbd1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.8 M 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout:Installing dependencies: 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: abseil-cpp x86_64 20211102.0-4.el9 epel 551 k 2026-04-01T02:23:15.722 INFO:teuthology.orchestra.run.vm06.stdout: boost-program-options x86_64 1.75.0-13.el9_7 appstream 104 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: c-ares x86_64 1.19.1-2.el9_4 baseos 110 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 M 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-grafana-dashboards noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 43 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mds x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 2.3 M 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-modules-core noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 289 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mon x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 5.0 M 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-osd x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 17 M 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-prometheus-alerts noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 17 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ceph-selinux x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 25 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: cryptsetup x86_64 2.7.2-4.el9 baseos 310 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: flexiblas x86_64 3.0.4-8.el9.0.1 appstream 30 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: flexiblas-netlib x86_64 3.0.4-8.el9.0.1 appstream 3.0 M 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: flexiblas-openblas-openmp x86_64 3.0.4-8.el9.0.1 appstream 15 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: fuse x86_64 2.9.9-17.el9 baseos 78 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: gperftools-libs x86_64 2.9.1-3.el9 epel 308 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: grpc-data noarch 1.46.7-10.el9 epel 19 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: ledmon-libs x86_64 1.1.0-3.el9 baseos 41 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libarrow x86_64 9.0.0-15.el9 epel 4.4 M 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: 
libarrow-doc noarch 9.0.0-15.el9 epel 25 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs-proxy2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 24 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libcephsqlite x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 164 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libconfig x86_64 1.7.2-9.el9 baseos 71 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libgfortran x86_64 11.5.0-11.el9 baseos 794 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libnbd x86_64 1.20.3-4.el9 appstream 171 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: liboath x86_64 2.6.12-1.el9 epel 49 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libpmemobj x86_64 1.12.1-1.el9 appstream 159 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libquadmath x86_64 11.5.0-11.el9 baseos 184 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: librabbitmq x86_64 0.11.0-7.el9 appstream 44 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libradosstriper1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 250 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: librdkafka x86_64 1.6.1-102.el9 appstream 662 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: librgw2 x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 6.4 M 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libstoragemgmt x86_64 1.10.1-1.el9 appstream 243 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libunwind x86_64 1.6.2-1.el9 epel 67 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: libxslt x86_64 1.1.34-13.el9_6 appstream 239 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: lmdb-libs x86_64 0.9.29-3.el9 baseos 60 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: lttng-ust x86_64 2.12.0-6.el9 appstream 282 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: lua x86_64 5.4.4-4.el9 appstream 187 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: lua-devel x86_64 5.4.4-4.el9 crb 21 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: luarocks noarch 3.9.2-5.el9 epel 151 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: mailcap noarch 2.1.49-5.el9.0.2 baseos 32 k 2026-04-01T02:23:15.723 INFO:teuthology.orchestra.run.vm06.stdout: openblas x86_64 0.3.29-1.el9 appstream 41 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: openblas-openmp x86_64 0.3.29-1.el9 appstream 5.3 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: parquet-libs x86_64 9.0.0-15.el9 epel 838 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: pciutils x86_64 3.7.0-7.el9 baseos 92 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: perl-Benchmark noarch 1.23-481.1.el9_6 appstream 25 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: protobuf x86_64 3.14.0-17.el9_7 appstream 1.0 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: protobuf-compiler x86_64 3.14.0-17.el9_7 crb 862 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-asyncssh noarch 2.13.2-5.el9 epel 548 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-autocommand noarch 2.2.2-8.el9 epel 29 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: 
python3-babel noarch 2.9.1-2.el9 appstream 5.8 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 epel 60 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-bcrypt x86_64 3.2.2-1.el9 epel 43 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-cachetools noarch 4.2.4-1.el9 epel 32 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-ceph-argparse x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 45 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 163 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-certifi noarch 2023.05.07-4.el9 epel 14 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-cffi x86_64 1.14.5-5.el9 baseos 241 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-cheroot noarch 10.0.1-5.el9 epel 173 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-cherrypy noarch 18.10.0-5.el9 epel 290 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-cryptography x86_64 36.0.1-5.el9_6 baseos 1.2 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-devel x86_64 3.9.23-2.el9 appstream 205 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-google-auth noarch 1:2.45.0-1.el9 epel 254 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-grpcio x86_64 1.46.7-10.el9 epel 2.0 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 epel 144 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-isodate noarch 0.6.1-3.el9 epel 56 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco noarch 8.2.1-3.el9 epel 11 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 epel 18 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 epel 23 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-context noarch 6.0.1-3.el9 epel 20 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 epel 19 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-text noarch 4.0.0-2.el9 epel 26 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-jinja2 noarch 2.11.3-8.el9_5 appstream 228 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 epel 1.0 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 appstream 166 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-lxml x86_64 4.6.5-3.el9 appstream 1.2 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-markupsafe x86_64 1.1.1-12.el9 appstream 32 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-more-itertools noarch 8.12.0-2.el9 epel 79 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-msgpack x86_64 1.0.3-2.el9 epel 86 k 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-natsort noarch 7.1.1-5.el9 epel 58 k 2026-04-01T02:23:15.724 
INFO:teuthology.orchestra.run.vm06.stdout: python3-numpy x86_64 1:1.23.5-2.el9_7 appstream 5.8 M 2026-04-01T02:23:15.724 INFO:teuthology.orchestra.run.vm06.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9_7 appstream 368 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-packaging noarch 20.9-5.el9 appstream 69 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-ply noarch 3.11-14.el9.0.1 baseos 103 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-portend noarch 3.1.0-2.el9 epel 16 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-protobuf noarch 3.14.0-17.el9_7 appstream 237 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 epel 90 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyasn1 noarch 0.4.8-7.el9_7 appstream 132 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9_7 appstream 210 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-pycparser noarch 2.20-6.el9 baseos 124 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyparsing noarch 2.4.7-9.el9.0.1 baseos 150 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-repoze-lru noarch 0.7-16.el9 epel 31 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-requests noarch 2.25.1-10.el9_6 baseos 115 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 appstream 43 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-routes noarch 2.5.1-5.el9 epel 188 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-rsa noarch 4.9-2.el9 epel 59 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-scipy x86_64 1.9.3-2.el9 appstream 19 M 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-tempora noarch 5.0.0-2.el9 epel 36 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-toml noarch 0.10.2-6.el9.0.1 appstream 44 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-typing-extensions noarch 4.15.0-1.el9 epel 86 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-urllib3 noarch 1.26.5-6.el9_7.1 baseos 191 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-websocket-client noarch 1.2.3-2.el9 epel 90 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-xmlsec x86_64 1.3.13-1.el9 epel 48 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-zc-lockfile noarch 2.0-10.el9 epel 20 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: qatlib x86_64 24.09.0-1.el9 appstream 221 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: qatzip-libs x86_64 1.3.1-1.el9 appstream 65 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: re2 x86_64 1:20211101-20.el9 epel 191 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: socat x86_64 1.7.4.1-8.el9 appstream 299 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: thrift x86_64 0.15.0-4.el9 epel 1.6 M 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: unzip x86_64 6.0-59.el9 baseos 180 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: xmlsec1 x86_64 1.2.29-13.el9 
appstream 188 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 appstream 89 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: xmlstarlet x86_64 1.6.1-20.el9 appstream 63 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: zip x86_64 3.0-35.el9 baseos 263 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout:Installing weak dependencies: 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-8.g0597158282e.el9.clyso ceph-noarch 22 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs-daemon x86_64 2:20.2.0-8.g0597158282e.el9.clyso ceph 35 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: nvme-cli x86_64 2.13-1.el9 baseos 1.0 M 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-influxdb noarch 5.3.1-1.el9 epel 139 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: python3-saml noarch 1.16.0-1.el9 epel 125 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: qatlib-service x86_64 24.09.0-1.el9 appstream 36 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: smartmontools x86_64 1:7.2-9.el9 baseos 551 k 2026-04-01T02:23:15.725 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:15.726 INFO:teuthology.orchestra.run.vm06.stdout:Transaction Summary 2026-04-01T02:23:15.726 INFO:teuthology.orchestra.run.vm06.stdout:============================================================================================= 2026-04-01T02:23:15.726 INFO:teuthology.orchestra.run.vm06.stdout:Install 150 Packages 2026-04-01T02:23:15.726 INFO:teuthology.orchestra.run.vm06.stdout:Upgrade 2 Packages 2026-04-01T02:23:15.726 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:15.726 INFO:teuthology.orchestra.run.vm06.stdout:Total download size: 274 M 2026-04-01T02:23:15.726 INFO:teuthology.orchestra.run.vm06.stdout:Downloading Packages: 2026-04-01T02:23:15.887 INFO:teuthology.orchestra.run.vm08.stdout:(1/152): ceph-20.2.0-8.g0597158282e.el9.clyso.x 73 kB/s | 6.5 kB 00:00 2026-04-01T02:23:16.430 INFO:teuthology.orchestra.run.vm08.stdout:(2/152): ceph-fuse-20.2.0-8.g0597158282e.el9.cl 1.7 MB/s | 940 kB 00:00 2026-04-01T02:23:16.467 INFO:teuthology.orchestra.run.vm08.stdout:(3/152): ceph-immutable-object-cache-20.2.0-8.g 4.1 MB/s | 154 kB 00:00 2026-04-01T02:23:16.556 INFO:teuthology.orchestra.run.vm03.stdout:(1/152): ceph-20.2.0-8.g0597158282e.el9.clyso.x 64 kB/s | 6.5 kB 00:00 2026-04-01T02:23:16.703 INFO:teuthology.orchestra.run.vm08.stdout:(4/152): ceph-base-20.2.0-8.g0597158282e.el9.cl 6.5 MB/s | 5.9 MB 00:00 2026-04-01T02:23:16.712 INFO:teuthology.orchestra.run.vm08.stdout:(5/152): ceph-mds-20.2.0-8.g0597158282e.el9.cly 9.6 MB/s | 2.3 MB 00:00 2026-04-01T02:23:16.812 INFO:teuthology.orchestra.run.vm03.stdout:(2/152): ceph-fuse-20.2.0-8.g0597158282e.el9.cl 3.6 MB/s | 940 kB 00:00 2026-04-01T02:23:16.814 INFO:teuthology.orchestra.run.vm06.stdout:(1/152): ceph-20.2.0-8.g0597158282e.el9.clyso.x 64 kB/s | 6.5 kB 00:00 2026-04-01T02:23:16.818 INFO:teuthology.orchestra.run.vm08.stdout:(6/152): ceph-mgr-20.2.0-8.g0597158282e.el9.cly 8.2 MB/s | 961 kB 00:00 2026-04-01T02:23:16.935 INFO:teuthology.orchestra.run.vm03.stdout:(3/152): ceph-immutable-object-cache-20.2.0-8.g 1.2 MB/s | 154 kB 00:00 2026-04-01T02:23:17.075 INFO:teuthology.orchestra.run.vm06.stdout:(2/152): ceph-fuse-20.2.0-8.g0597158282e.el9.cl 3.5 MB/s | 940 kB 00:00 
2026-04-01T02:23:17.172 INFO:teuthology.orchestra.run.vm06.stdout:(3/152): ceph-immutable-object-cache-20.2.0-8.g 1.6 MB/s | 154 kB 00:00 2026-04-01T02:23:17.248 INFO:teuthology.orchestra.run.vm08.stdout:(7/152): ceph-common-20.2.0-8.g0597158282e.el9. 16 MB/s | 24 MB 00:01 2026-04-01T02:23:17.278 INFO:teuthology.orchestra.run.vm03.stdout:(4/152): ceph-mds-20.2.0-8.g0597158282e.el9.cly 6.8 MB/s | 2.3 MB 00:00 2026-04-01T02:23:17.416 INFO:teuthology.orchestra.run.vm03.stdout:(5/152): ceph-common-20.2.0-8.g0597158282e.el9. 25 MB/s | 24 MB 00:00 2026-04-01T02:23:17.434 INFO:teuthology.orchestra.run.vm08.stdout:(8/152): ceph-mon-20.2.0-8.g0597158282e.el9.cly 7.0 MB/s | 5.0 MB 00:00 2026-04-01T02:23:17.484 INFO:teuthology.orchestra.run.vm08.stdout:(9/152): ceph-selinux-20.2.0-8.g0597158282e.el9 511 kB/s | 25 kB 00:00 2026-04-01T02:23:17.512 INFO:teuthology.orchestra.run.vm03.stdout:(6/152): ceph-base-20.2.0-8.g0597158282e.el9.cl 5.6 MB/s | 5.9 MB 00:01 2026-04-01T02:23:17.527 INFO:teuthology.orchestra.run.vm06.stdout:(4/152): ceph-mds-20.2.0-8.g0597158282e.el9.cly 6.6 MB/s | 2.3 MB 00:00 2026-04-01T02:23:17.697 INFO:teuthology.orchestra.run.vm03.stdout:(7/152): ceph-mgr-20.2.0-8.g0597158282e.el9.cly 2.2 MB/s | 961 kB 00:00 2026-04-01T02:23:17.708 INFO:teuthology.orchestra.run.vm06.stdout:(5/152): ceph-mgr-20.2.0-8.g0597158282e.el9.cly 5.2 MB/s | 961 kB 00:00 2026-04-01T02:23:18.176 INFO:teuthology.orchestra.run.vm08.stdout:(10/152): ceph-osd-20.2.0-8.g0597158282e.el9.cl 12 MB/s | 17 MB 00:01 2026-04-01T02:23:18.229 INFO:teuthology.orchestra.run.vm08.stdout:(11/152): libcephfs-daemon-20.2.0-8.g0597158282 674 kB/s | 35 kB 00:00 2026-04-01T02:23:18.275 INFO:teuthology.orchestra.run.vm08.stdout:(12/152): libcephfs-devel-20.2.0-8.g0597158282e 758 kB/s | 34 kB 00:00 2026-04-01T02:23:18.314 INFO:teuthology.orchestra.run.vm08.stdout:(13/152): libcephfs-proxy2-20.2.0-8.g0597158282 621 kB/s | 24 kB 00:00 2026-04-01T02:23:18.360 INFO:teuthology.orchestra.run.vm06.stdout:(6/152): ceph-base-20.2.0-8.g0597158282e.el9.cl 3.6 MB/s | 5.9 MB 00:01 2026-04-01T02:23:18.443 INFO:teuthology.orchestra.run.vm08.stdout:(14/152): libcephfs2-20.2.0-8.g0597158282e.el9. 6.6 MB/s | 868 kB 00:00 2026-04-01T02:23:18.517 INFO:teuthology.orchestra.run.vm08.stdout:(15/152): libcephsqlite-20.2.0-8.g0597158282e.e 2.2 MB/s | 164 kB 00:00 2026-04-01T02:23:18.635 INFO:teuthology.orchestra.run.vm03.stdout:(8/152): ceph-mon-20.2.0-8.g0597158282e.el9.cly 4.1 MB/s | 5.0 MB 00:01 2026-04-01T02:23:18.671 INFO:teuthology.orchestra.run.vm03.stdout:(9/152): ceph-selinux-20.2.0-8.g0597158282e.el9 706 kB/s | 25 kB 00:00 2026-04-01T02:23:18.714 INFO:teuthology.orchestra.run.vm08.stdout:(16/152): ceph-radosgw-20.2.0-8.g0597158282e.el 16 MB/s | 24 MB 00:01 2026-04-01T02:23:18.717 INFO:teuthology.orchestra.run.vm08.stdout:(17/152): librados-devel-20.2.0-8.g0597158282e. 
628 kB/s | 126 kB 00:00 2026-04-01T02:23:18.777 INFO:teuthology.orchestra.run.vm08.stdout:(18/152): libradosstriper1-20.2.0-8.g0597158282 3.9 MB/s | 250 kB 00:00 2026-04-01T02:23:18.809 INFO:teuthology.orchestra.run.vm03.stdout:(10/152): ceph-osd-20.2.0-8.g0597158282e.el9.cl 13 MB/s | 17 MB 00:01 2026-04-01T02:23:18.855 INFO:teuthology.orchestra.run.vm03.stdout:(11/152): libcephfs-daemon-20.2.0-8.g0597158282 783 kB/s | 35 kB 00:00 2026-04-01T02:23:18.883 INFO:teuthology.orchestra.run.vm08.stdout:(19/152): python3-ceph-argparse-20.2.0-8.g05971 430 kB/s | 45 kB 00:00 2026-04-01T02:23:18.934 INFO:teuthology.orchestra.run.vm03.stdout:(12/152): libcephfs-devel-20.2.0-8.g0597158282e 439 kB/s | 34 kB 00:00 2026-04-01T02:23:18.953 INFO:teuthology.orchestra.run.vm08.stdout:(20/152): python3-ceph-common-20.2.0-8.g0597158 2.3 MB/s | 163 kB 00:00 2026-04-01T02:23:18.972 INFO:teuthology.orchestra.run.vm03.stdout:(13/152): libcephfs-proxy2-20.2.0-8.g0597158282 644 kB/s | 24 kB 00:00 2026-04-01T02:23:19.030 INFO:teuthology.orchestra.run.vm06.stdout:(7/152): ceph-common-20.2.0-8.g0597158282e.el9. 10 MB/s | 24 MB 00:02 2026-04-01T02:23:19.051 INFO:teuthology.orchestra.run.vm08.stdout:(21/152): python3-cephfs-20.2.0-8.g0597158282e. 1.6 MB/s | 163 kB 00:00 2026-04-01T02:23:19.105 INFO:teuthology.orchestra.run.vm03.stdout:(14/152): libcephfs2-20.2.0-8.g0597158282e.el9. 6.4 MB/s | 868 kB 00:00 2026-04-01T02:23:19.162 INFO:teuthology.orchestra.run.vm08.stdout:(22/152): python3-rados-20.2.0-8.g0597158282e.e 2.8 MB/s | 317 kB 00:00 2026-04-01T02:23:19.250 INFO:teuthology.orchestra.run.vm08.stdout:(23/152): librgw2-20.2.0-8.g0597158282e.el9.cly 12 MB/s | 6.4 MB 00:00 2026-04-01T02:23:19.263 INFO:teuthology.orchestra.run.vm08.stdout:(24/152): python3-rbd-20.2.0-8.g0597158282e.el9 3.0 MB/s | 304 kB 00:00 2026-04-01T02:23:19.295 INFO:teuthology.orchestra.run.vm08.stdout:(25/152): python3-rgw-20.2.0-8.g0597158282e.el9 2.2 MB/s | 99 kB 00:00 2026-04-01T02:23:19.332 INFO:teuthology.orchestra.run.vm03.stdout:(15/152): ceph-radosgw-20.2.0-8.g0597158282e.el 15 MB/s | 24 MB 00:01 2026-04-01T02:23:19.385 INFO:teuthology.orchestra.run.vm08.stdout:(26/152): rbd-fuse-20.2.0-8.g0597158282e.el9.cl 747 kB/s | 91 kB 00:00 2026-04-01T02:23:19.402 INFO:teuthology.orchestra.run.vm03.stdout:(16/152): libcephsqlite-20.2.0-8.g0597158282e.e 553 kB/s | 164 kB 00:00 2026-04-01T02:23:19.413 INFO:teuthology.orchestra.run.vm03.stdout:(17/152): librados-devel-20.2.0-8.g0597158282e. 1.5 MB/s | 126 kB 00:00 2026-04-01T02:23:19.472 INFO:teuthology.orchestra.run.vm03.stdout:(18/152): libradosstriper1-20.2.0-8.g0597158282 3.5 MB/s | 250 kB 00:00 2026-04-01T02:23:19.478 INFO:teuthology.orchestra.run.vm08.stdout:(27/152): rbd-nbd-20.2.0-8.g0597158282e.el9.cly 1.9 MB/s | 180 kB 00:00 2026-04-01T02:23:19.528 INFO:teuthology.orchestra.run.vm03.stdout:(19/152): python3-ceph-argparse-20.2.0-8.g05971 803 kB/s | 45 kB 00:00 2026-04-01T02:23:19.615 INFO:teuthology.orchestra.run.vm03.stdout:(20/152): python3-ceph-common-20.2.0-8.g0597158 1.9 MB/s | 163 kB 00:00 2026-04-01T02:23:19.628 INFO:teuthology.orchestra.run.vm06.stdout:(8/152): ceph-mon-20.2.0-8.g0597158282e.el9.cly 2.6 MB/s | 5.0 MB 00:01 2026-04-01T02:23:19.689 INFO:teuthology.orchestra.run.vm06.stdout:(9/152): ceph-osd-20.2.0-8.g0597158282e.el9.cly 13 MB/s | 17 MB 00:01 2026-04-01T02:23:19.689 INFO:teuthology.orchestra.run.vm08.stdout:(28/152): rbd-mirror-20.2.0-8.g0597158282e.el9. 
7.4 MB/s | 2.9 MB 00:00 2026-04-01T02:23:19.690 INFO:teuthology.orchestra.run.vm06.stdout:(10/152): ceph-selinux-20.2.0-8.g0597158282e.el 406 kB/s | 25 kB 00:00 2026-04-01T02:23:19.745 INFO:teuthology.orchestra.run.vm06.stdout:(11/152): libcephfs-daemon-20.2.0-8.g0597158282 654 kB/s | 35 kB 00:00 2026-04-01T02:23:19.796 INFO:teuthology.orchestra.run.vm06.stdout:(12/152): libcephfs-devel-20.2.0-8.g0597158282e 680 kB/s | 34 kB 00:00 2026-04-01T02:23:19.859 INFO:teuthology.orchestra.run.vm06.stdout:(13/152): libcephfs-proxy2-20.2.0-8.g0597158282 384 kB/s | 24 kB 00:00 2026-04-01T02:23:20.119 INFO:teuthology.orchestra.run.vm06.stdout:(14/152): libcephfs2-20.2.0-8.g0597158282e.el9. 3.3 MB/s | 868 kB 00:00 2026-04-01T02:23:20.163 INFO:teuthology.orchestra.run.vm03.stdout:(21/152): librgw2-20.2.0-8.g0597158282e.el9.cly 8.5 MB/s | 6.4 MB 00:00 2026-04-01T02:23:20.249 INFO:teuthology.orchestra.run.vm03.stdout:(22/152): python3-rados-20.2.0-8.g0597158282e.e 3.6 MB/s | 317 kB 00:00 2026-04-01T02:23:20.256 INFO:teuthology.orchestra.run.vm06.stdout:(15/152): libcephsqlite-20.2.0-8.g0597158282e.e 1.2 MB/s | 164 kB 00:00 2026-04-01T02:23:20.306 INFO:teuthology.orchestra.run.vm08.stdout:(29/152): ceph-mgr-cephadm-20.2.0-8.g0597158282 280 kB/s | 173 kB 00:00 2026-04-01T02:23:20.355 INFO:teuthology.orchestra.run.vm03.stdout:(23/152): python3-rbd-20.2.0-8.g0597158282e.el9 2.8 MB/s | 304 kB 00:00 2026-04-01T02:23:20.360 INFO:teuthology.orchestra.run.vm06.stdout:(16/152): librados-devel-20.2.0-8.g0597158282e. 1.2 MB/s | 126 kB 00:00 2026-04-01T02:23:20.425 INFO:teuthology.orchestra.run.vm06.stdout:(17/152): libradosstriper1-20.2.0-8.g0597158282 3.8 MB/s | 250 kB 00:00 2026-04-01T02:23:20.454 INFO:teuthology.orchestra.run.vm03.stdout:(24/152): python3-cephfs-20.2.0-8.g0597158282e. 195 kB/s | 163 kB 00:00 2026-04-01T02:23:20.491 INFO:teuthology.orchestra.run.vm03.stdout:(25/152): python3-rgw-20.2.0-8.g0597158282e.el9 733 kB/s | 99 kB 00:00 2026-04-01T02:23:20.587 INFO:teuthology.orchestra.run.vm03.stdout:(26/152): rbd-fuse-20.2.0-8.g0597158282e.el9.cl 682 kB/s | 91 kB 00:00 2026-04-01T02:23:20.823 INFO:teuthology.orchestra.run.vm08.stdout:(30/152): ceph-grafana-dashboards-20.2.0-8.g059 32 kB/s | 43 kB 00:01 2026-04-01T02:23:20.996 INFO:teuthology.orchestra.run.vm06.stdout:(18/152): ceph-radosgw-20.2.0-8.g0597158282e.el 12 MB/s | 24 MB 00:01 2026-04-01T02:23:21.054 INFO:teuthology.orchestra.run.vm06.stdout:(19/152): python3-ceph-argparse-20.2.0-8.g05971 789 kB/s | 45 kB 00:00 2026-04-01T02:23:21.070 INFO:teuthology.orchestra.run.vm03.stdout:(27/152): rbd-nbd-20.2.0-8.g0597158282e.el9.cly 372 kB/s | 180 kB 00:00 2026-04-01T02:23:21.142 INFO:teuthology.orchestra.run.vm06.stdout:(20/152): python3-ceph-common-20.2.0-8.g0597158 1.8 MB/s | 163 kB 00:00 2026-04-01T02:23:21.215 INFO:teuthology.orchestra.run.vm06.stdout:(21/152): python3-cephfs-20.2.0-8.g0597158282e. 
2.2 MB/s | 163 kB 00:00 2026-04-01T02:23:21.311 INFO:teuthology.orchestra.run.vm06.stdout:(22/152): python3-rados-20.2.0-8.g0597158282e.e 3.2 MB/s | 317 kB 00:00 2026-04-01T02:23:21.427 INFO:teuthology.orchestra.run.vm03.stdout:(28/152): ceph-grafana-dashboards-20.2.0-8.g059 121 kB/s | 43 kB 00:00 2026-04-01T02:23:21.458 INFO:teuthology.orchestra.run.vm06.stdout:(23/152): librgw2-20.2.0-8.g0597158282e.el9.cly 6.2 MB/s | 6.4 MB 00:01 2026-04-01T02:23:21.476 INFO:teuthology.orchestra.run.vm06.stdout:(24/152): python3-rbd-20.2.0-8.g0597158282e.el9 1.8 MB/s | 304 kB 00:00 2026-04-01T02:23:21.497 INFO:teuthology.orchestra.run.vm03.stdout:(29/152): ceph-mgr-cephadm-20.2.0-8.g0597158282 2.4 MB/s | 173 kB 00:00 2026-04-01T02:23:21.505 INFO:teuthology.orchestra.run.vm06.stdout:(25/152): python3-rgw-20.2.0-8.g0597158282e.el9 2.1 MB/s | 99 kB 00:00 2026-04-01T02:23:21.518 INFO:teuthology.orchestra.run.vm06.stdout:(26/152): rbd-fuse-20.2.0-8.g0597158282e.el9.cl 2.2 MB/s | 91 kB 00:00 2026-04-01T02:23:21.685 INFO:teuthology.orchestra.run.vm06.stdout:(27/152): rbd-nbd-20.2.0-8.g0597158282e.el9.cly 1.1 MB/s | 180 kB 00:00 2026-04-01T02:23:21.696 INFO:teuthology.orchestra.run.vm03.stdout:(30/152): rbd-mirror-20.2.0-8.g0597158282e.el9. 2.4 MB/s | 2.9 MB 00:01 2026-04-01T02:23:21.718 INFO:teuthology.orchestra.run.vm06.stdout:(28/152): ceph-grafana-dashboards-20.2.0-8.g059 1.3 MB/s | 43 kB 00:00 2026-04-01T02:23:21.787 INFO:teuthology.orchestra.run.vm06.stdout:(29/152): ceph-mgr-cephadm-20.2.0-8.g0597158282 2.4 MB/s | 173 kB 00:00 2026-04-01T02:23:22.358 INFO:teuthology.orchestra.run.vm06.stdout:(30/152): rbd-mirror-20.2.0-8.g0597158282e.el9. 3.4 MB/s | 2.9 MB 00:00 2026-04-01T02:23:22.478 INFO:teuthology.orchestra.run.vm03.stdout:(31/152): ceph-mgr-diskprediction-local-20.2.0- 9.5 MB/s | 7.4 MB 00:00 2026-04-01T02:23:22.511 INFO:teuthology.orchestra.run.vm03.stdout:(32/152): ceph-mgr-k8sevents-20.2.0-8.g05971582 686 kB/s | 22 kB 00:00 2026-04-01T02:23:22.580 INFO:teuthology.orchestra.run.vm03.stdout:(33/152): ceph-mgr-modules-core-20.2.0-8.g05971 4.1 MB/s | 289 kB 00:00 2026-04-01T02:23:22.660 INFO:teuthology.orchestra.run.vm03.stdout:(34/152): ceph-mgr-rook-20.2.0-8.g0597158282e.e 628 kB/s | 50 kB 00:00 2026-04-01T02:23:22.774 INFO:teuthology.orchestra.run.vm03.stdout:(35/152): ceph-prometheus-alerts-20.2.0-8.g0597 150 kB/s | 17 kB 00:00 2026-04-01T02:23:22.774 INFO:teuthology.orchestra.run.vm08.stdout:(31/152): ceph-test-20.2.0-8.g0597158282e.el9.c 16 MB/s | 85 MB 00:05 2026-04-01T02:23:22.816 INFO:teuthology.orchestra.run.vm08.stdout:(32/152): ceph-mgr-k8sevents-20.2.0-8.g05971582 544 kB/s | 22 kB 00:00 2026-04-01T02:23:22.819 INFO:teuthology.orchestra.run.vm03.stdout:(36/152): ceph-volume-20.2.0-8.g0597158282e.el9 6.5 MB/s | 297 kB 00:00 2026-04-01T02:23:22.890 INFO:teuthology.orchestra.run.vm08.stdout:(33/152): ceph-mgr-modules-core-20.2.0-8.g05971 3.8 MB/s | 289 kB 00:00 2026-04-01T02:23:22.989 INFO:teuthology.orchestra.run.vm03.stdout:(37/152): cephadm-20.2.0-8.g0597158282e.el9.cly 5.8 MB/s | 1.0 MB 00:00 2026-04-01T02:23:23.038 INFO:teuthology.orchestra.run.vm08.stdout:(34/152): ceph-mgr-rook-20.2.0-8.g0597158282e.e 341 kB/s | 50 kB 00:00 2026-04-01T02:23:23.080 INFO:teuthology.orchestra.run.vm08.stdout:(35/152): ceph-prometheus-alerts-20.2.0-8.g0597 409 kB/s | 17 kB 00:00 2026-04-01T02:23:23.081 INFO:teuthology.orchestra.run.vm06.stdout:(31/152): ceph-mgr-dashboard-20.2.0-8.g05971582 12 MB/s | 15 MB 00:01 2026-04-01T02:23:23.142 INFO:teuthology.orchestra.run.vm06.stdout:(32/152): 
ceph-mgr-k8sevents-20.2.0-8.g05971582 363 kB/s | 22 kB 00:00 2026-04-01T02:23:23.168 INFO:teuthology.orchestra.run.vm08.stdout:(36/152): ceph-volume-20.2.0-8.g0597158282e.el9 3.3 MB/s | 297 kB 00:00 2026-04-01T02:23:23.186 INFO:teuthology.orchestra.run.vm06.stdout:(33/152): ceph-mgr-modules-core-20.2.0-8.g05971 6.5 MB/s | 289 kB 00:00 2026-04-01T02:23:23.253 INFO:teuthology.orchestra.run.vm06.stdout:(34/152): ceph-mgr-diskprediction-local-20.2.0- 8.3 MB/s | 7.4 MB 00:00 2026-04-01T02:23:23.257 INFO:teuthology.orchestra.run.vm06.stdout:(35/152): ceph-mgr-rook-20.2.0-8.g0597158282e.e 705 kB/s | 50 kB 00:00 2026-04-01T02:23:23.285 INFO:teuthology.orchestra.run.vm06.stdout:(36/152): ceph-prometheus-alerts-20.2.0-8.g0597 528 kB/s | 17 kB 00:00 2026-04-01T02:23:23.308 INFO:teuthology.orchestra.run.vm06.stdout:(37/152): ceph-volume-20.2.0-8.g0597158282e.el9 5.7 MB/s | 297 kB 00:00 2026-04-01T02:23:23.351 INFO:teuthology.orchestra.run.vm08.stdout:(37/152): cephadm-20.2.0-8.g0597158282e.el9.cly 5.4 MB/s | 1.0 MB 00:00 2026-04-01T02:23:23.425 INFO:teuthology.orchestra.run.vm06.stdout:(38/152): cephadm-20.2.0-8.g0597158282e.el9.cly 7.1 MB/s | 1.0 MB 00:00 2026-04-01T02:23:23.519 INFO:teuthology.orchestra.run.vm06.stdout:(39/152): abseil-cpp-20211102.0-4.el9.x86_64.rp 2.6 MB/s | 551 kB 00:00 2026-04-01T02:23:23.526 INFO:teuthology.orchestra.run.vm03.stdout:(38/152): abseil-cpp-20211102.0-4.el9.x86_64.rp 1.0 MB/s | 551 kB 00:00 2026-04-01T02:23:23.561 INFO:teuthology.orchestra.run.vm06.stdout:(40/152): grpc-data-1.46.7-10.el9.noarch.rpm 462 kB/s | 19 kB 00:00 2026-04-01T02:23:23.561 INFO:teuthology.orchestra.run.vm08.stdout:(38/152): abseil-cpp-20211102.0-4.el9.x86_64.rp 2.6 MB/s | 551 kB 00:00 2026-04-01T02:23:23.599 INFO:teuthology.orchestra.run.vm03.stdout:(39/152): gperftools-libs-2.9.1-3.el9.x86_64.rp 4.1 MB/s | 308 kB 00:00 2026-04-01T02:23:23.609 INFO:teuthology.orchestra.run.vm06.stdout:(41/152): gperftools-libs-2.9.1-3.el9.x86_64.rp 1.6 MB/s | 308 kB 00:00 2026-04-01T02:23:23.629 INFO:teuthology.orchestra.run.vm08.stdout:(39/152): gperftools-libs-2.9.1-3.el9.x86_64.rp 4.4 MB/s | 308 kB 00:00 2026-04-01T02:23:23.660 INFO:teuthology.orchestra.run.vm06.stdout:(42/152): libarrow-doc-9.0.0-15.el9.noarch.rpm 494 kB/s | 25 kB 00:00 2026-04-01T02:23:23.667 INFO:teuthology.orchestra.run.vm03.stdout:(40/152): grpc-data-1.46.7-10.el9.noarch.rpm 288 kB/s | 19 kB 00:00 2026-04-01T02:23:23.688 INFO:teuthology.orchestra.run.vm08.stdout:(40/152): grpc-data-1.46.7-10.el9.noarch.rpm 335 kB/s | 19 kB 00:00 2026-04-01T02:23:23.910 INFO:teuthology.orchestra.run.vm06.stdout:(43/152): ceph-test-20.2.0-8.g0597158282e.el9.c 20 MB/s | 85 MB 00:04 2026-04-01T02:23:23.911 INFO:teuthology.orchestra.run.vm08.stdout:(41/152): libarrow-9.0.0-15.el9.x86_64.rpm 20 MB/s | 4.4 MB 00:00 2026-04-01T02:23:23.912 INFO:teuthology.orchestra.run.vm06.stdout:(44/152): liboath-2.6.12-1.el9.x86_64.rpm 194 kB/s | 49 kB 00:00 2026-04-01T02:23:23.930 INFO:teuthology.orchestra.run.vm06.stdout:(45/152): libarrow-9.0.0-15.el9.x86_64.rpm 12 MB/s | 4.4 MB 00:00 2026-04-01T02:23:23.953 INFO:teuthology.orchestra.run.vm06.stdout:(46/152): luarocks-3.9.2-5.el9.noarch.rpm 3.7 MB/s | 151 kB 00:00 2026-04-01T02:23:23.963 INFO:teuthology.orchestra.run.vm08.stdout:(42/152): libarrow-doc-9.0.0-15.el9.noarch.rpm 472 kB/s | 25 kB 00:00 2026-04-01T02:23:24.000 INFO:teuthology.orchestra.run.vm06.stdout:(47/152): parquet-libs-9.0.0-15.el9.x86_64.rpm 12 MB/s | 838 kB 00:00 2026-04-01T02:23:24.008 INFO:teuthology.orchestra.run.vm03.stdout:(41/152): 
libarrow-9.0.0-15.el9.x86_64.rpm 13 MB/s | 4.4 MB 00:00 2026-04-01T02:23:24.023 INFO:teuthology.orchestra.run.vm08.stdout:(43/152): liboath-2.6.12-1.el9.x86_64.rpm 820 kB/s | 49 kB 00:00 2026-04-01T02:23:24.025 INFO:teuthology.orchestra.run.vm06.stdout:(48/152): libunwind-1.6.2-1.el9.x86_64.rpm 587 kB/s | 67 kB 00:00 2026-04-01T02:23:24.031 INFO:teuthology.orchestra.run.vm06.stdout:(49/152): python3-asyncssh-2.13.2-5.el9.noarch. 6.8 MB/s | 548 kB 00:00 2026-04-01T02:23:24.040 INFO:teuthology.orchestra.run.vm06.stdout:(50/152): python3-autocommand-2.2.2-8.el9.noarc 726 kB/s | 29 kB 00:00 2026-04-01T02:23:24.065 INFO:teuthology.orchestra.run.vm06.stdout:(51/152): python3-backports-tarfile-1.2.0-1.el9 1.5 MB/s | 60 kB 00:00 2026-04-01T02:23:24.074 INFO:teuthology.orchestra.run.vm06.stdout:(52/152): python3-bcrypt-3.2.2-1.el9.x86_64.rpm 1.0 MB/s | 43 kB 00:00 2026-04-01T02:23:24.074 INFO:teuthology.orchestra.run.vm03.stdout:(42/152): libarrow-doc-9.0.0-15.el9.noarch.rpm 373 kB/s | 25 kB 00:00 2026-04-01T02:23:24.080 INFO:teuthology.orchestra.run.vm06.stdout:(53/152): python3-cachetools-4.2.4-1.el9.noarch 807 kB/s | 32 kB 00:00 2026-04-01T02:23:24.081 INFO:teuthology.orchestra.run.vm08.stdout:(44/152): libunwind-1.6.2-1.el9.x86_64.rpm 1.1 MB/s | 67 kB 00:00 2026-04-01T02:23:24.112 INFO:teuthology.orchestra.run.vm06.stdout:(54/152): python3-certifi-2023.05.07-4.el9.noar 298 kB/s | 14 kB 00:00 2026-04-01T02:23:24.116 INFO:teuthology.orchestra.run.vm06.stdout:(55/152): python3-cheroot-10.0.1-5.el9.noarch.r 4.1 MB/s | 173 kB 00:00 2026-04-01T02:23:24.134 INFO:teuthology.orchestra.run.vm06.stdout:(56/152): python3-cherrypy-18.10.0-5.el9.noarch 5.4 MB/s | 290 kB 00:00 2026-04-01T02:23:24.141 INFO:teuthology.orchestra.run.vm08.stdout:(45/152): luarocks-3.9.2-5.el9.noarch.rpm 2.5 MB/s | 151 kB 00:00 2026-04-01T02:23:24.141 INFO:teuthology.orchestra.run.vm03.stdout:(43/152): liboath-2.6.12-1.el9.x86_64.rpm 733 kB/s | 49 kB 00:00 2026-04-01T02:23:24.184 INFO:teuthology.orchestra.run.vm06.stdout:(57/152): python3-google-auth-2.45.0-1.el9.noar 3.5 MB/s | 254 kB 00:00 2026-04-01T02:23:24.203 INFO:teuthology.orchestra.run.vm06.stdout:(58/152): python3-grpcio-tools-1.46.7-10.el9.x8 2.0 MB/s | 144 kB 00:00 2026-04-01T02:23:24.224 INFO:teuthology.orchestra.run.vm06.stdout:(59/152): python3-influxdb-5.3.1-1.el9.noarch.r 3.5 MB/s | 139 kB 00:00 2026-04-01T02:23:24.242 INFO:teuthology.orchestra.run.vm06.stdout:(60/152): python3-isodate-0.6.1-3.el9.noarch.rp 1.4 MB/s | 56 kB 00:00 2026-04-01T02:23:24.250 INFO:teuthology.orchestra.run.vm06.stdout:(61/152): python3-grpcio-1.46.7-10.el9.x86_64.r 15 MB/s | 2.0 MB 00:00 2026-04-01T02:23:24.253 INFO:teuthology.orchestra.run.vm06.stdout:(62/152): python3-jaraco-8.2.1-3.el9.noarch.rpm 372 kB/s | 11 kB 00:00 2026-04-01T02:23:24.285 INFO:teuthology.orchestra.run.vm06.stdout:(63/152): python3-jaraco-classes-3.2.1-5.el9.no 410 kB/s | 18 kB 00:00 2026-04-01T02:23:24.287 INFO:teuthology.orchestra.run.vm06.stdout:(64/152): python3-jaraco-collections-3.0.0-8.el 630 kB/s | 23 kB 00:00 2026-04-01T02:23:24.287 INFO:teuthology.orchestra.run.vm08.stdout:(46/152): ceph-mgr-dashboard-20.2.0-8.g05971582 3.8 MB/s | 15 MB 00:03 2026-04-01T02:23:24.288 INFO:teuthology.orchestra.run.vm06.stdout:(65/152): python3-jaraco-context-6.0.1-3.el9.no 552 kB/s | 20 kB 00:00 2026-04-01T02:23:24.291 INFO:teuthology.orchestra.run.vm08.stdout:(47/152): parquet-libs-9.0.0-15.el9.x86_64.rpm 5.5 MB/s | 838 kB 00:00 2026-04-01T02:23:24.316 INFO:teuthology.orchestra.run.vm06.stdout:(66/152): 
python3-jaraco-functools-3.5.0-2.el9. 641 kB/s | 19 kB 00:00 2026-04-01T02:23:24.332 INFO:teuthology.orchestra.run.vm06.stdout:(67/152): python3-jaraco-text-4.0.0-2.el9.noarc 594 kB/s | 26 kB 00:00 2026-04-01T02:23:24.347 INFO:teuthology.orchestra.run.vm08.stdout:(48/152): python3-autocommand-2.2.2-8.el9.noarc 530 kB/s | 29 kB 00:00 2026-04-01T02:23:24.347 INFO:teuthology.orchestra.run.vm06.stdout:(68/152): python3-more-itertools-8.12.0-2.el9.n 2.5 MB/s | 79 kB 00:00 2026-04-01T02:23:24.353 INFO:teuthology.orchestra.run.vm08.stdout:(49/152): python3-asyncssh-2.13.2-5.el9.noarch. 8.3 MB/s | 548 kB 00:00 2026-04-01T02:23:24.364 INFO:teuthology.orchestra.run.vm06.stdout:(69/152): python3-msgpack-1.0.3-2.el9.x86_64.rp 2.7 MB/s | 86 kB 00:00 2026-04-01T02:23:24.376 INFO:teuthology.orchestra.run.vm06.stdout:(70/152): python3-kubernetes-26.1.0-3.el9.noarc 12 MB/s | 1.0 MB 00:00 2026-04-01T02:23:24.378 INFO:teuthology.orchestra.run.vm06.stdout:(71/152): python3-natsort-7.1.1-5.el9.noarch.rp 1.9 MB/s | 58 kB 00:00 2026-04-01T02:23:24.399 INFO:teuthology.orchestra.run.vm06.stdout:(72/152): python3-portend-3.1.0-2.el9.noarch.rp 467 kB/s | 16 kB 00:00 2026-04-01T02:23:24.404 INFO:teuthology.orchestra.run.vm08.stdout:(50/152): python3-backports-tarfile-1.2.0-1.el9 1.0 MB/s | 60 kB 00:00 2026-04-01T02:23:24.405 INFO:teuthology.orchestra.run.vm08.stdout:(51/152): python3-bcrypt-3.2.2-1.el9.x86_64.rpm 830 kB/s | 43 kB 00:00 2026-04-01T02:23:24.419 INFO:teuthology.orchestra.run.vm06.stdout:(73/152): python3-pyOpenSSL-21.0.0-1.el9.noarch 2.1 MB/s | 90 kB 00:00 2026-04-01T02:23:24.422 INFO:teuthology.orchestra.run.vm06.stdout:(74/152): python3-repoze-lru-0.7-16.el9.noarch. 696 kB/s | 31 kB 00:00 2026-04-01T02:23:24.446 INFO:teuthology.orchestra.run.vm06.stdout:(75/152): python3-routes-2.5.1-5.el9.noarch.rpm 4.0 MB/s | 188 kB 00:00 2026-04-01T02:23:24.458 INFO:teuthology.orchestra.run.vm08.stdout:(52/152): python3-certifi-2023.05.07-4.el9.noar 271 kB/s | 14 kB 00:00 2026-04-01T02:23:24.458 INFO:teuthology.orchestra.run.vm08.stdout:(53/152): python3-cachetools-4.2.4-1.el9.noarch 596 kB/s | 32 kB 00:00 2026-04-01T02:23:24.460 INFO:teuthology.orchestra.run.vm06.stdout:(76/152): python3-rsa-4.9-2.el9.noarch.rpm 1.4 MB/s | 59 kB 00:00 2026-04-01T02:23:24.475 INFO:teuthology.orchestra.run.vm06.stdout:(77/152): python3-saml-1.16.0-1.el9.noarch.rpm 2.3 MB/s | 125 kB 00:00 2026-04-01T02:23:24.486 INFO:teuthology.orchestra.run.vm06.stdout:(78/152): python3-tempora-5.0.0-2.el9.noarch.rp 900 kB/s | 36 kB 00:00 2026-04-01T02:23:24.503 INFO:teuthology.orchestra.run.vm06.stdout:(79/152): python3-typing-extensions-4.15.0-1.el 2.0 MB/s | 86 kB 00:00 2026-04-01T02:23:24.506 INFO:teuthology.orchestra.run.vm06.stdout:(80/152): python3-websocket-client-1.2.3-2.el9. 
2.9 MB/s | 90 kB 00:00 2026-04-01T02:23:24.517 INFO:teuthology.orchestra.run.vm08.stdout:(54/152): python3-cherrypy-18.10.0-5.el9.noarch 4.9 MB/s | 290 kB 00:00 2026-04-01T02:23:24.521 INFO:teuthology.orchestra.run.vm06.stdout:(81/152): python3-xmlsec-1.3.13-1.el9.x86_64.rp 1.4 MB/s | 48 kB 00:00 2026-04-01T02:23:24.528 INFO:teuthology.orchestra.run.vm08.stdout:(55/152): python3-cheroot-10.0.1-5.el9.noarch.r 2.4 MB/s | 173 kB 00:00 2026-04-01T02:23:24.532 INFO:teuthology.orchestra.run.vm06.stdout:(82/152): python3-xmltodict-0.12.0-15.el9.noarc 755 kB/s | 22 kB 00:00 2026-04-01T02:23:24.535 INFO:teuthology.orchestra.run.vm06.stdout:(83/152): python3-zc-lockfile-2.0-10.el9.noarch 684 kB/s | 20 kB 00:00 2026-04-01T02:23:24.565 INFO:teuthology.orchestra.run.vm06.stdout:(84/152): re2-20211101-20.el9.x86_64.rpm 4.3 MB/s | 191 kB 00:00 2026-04-01T02:23:24.575 INFO:teuthology.orchestra.run.vm08.stdout:(56/152): python3-google-auth-2.45.0-1.el9.noar 4.3 MB/s | 254 kB 00:00 2026-04-01T02:23:24.578 INFO:teuthology.orchestra.run.vm06.stdout:(85/152): s3cmd-2.4.0-1.el9.noarch.rpm 4.5 MB/s | 206 kB 00:00 2026-04-01T02:23:24.614 INFO:teuthology.orchestra.run.vm03.stdout:(44/152): libunwind-1.6.2-1.el9.x86_64.rpm 143 kB/s | 67 kB 00:00 2026-04-01T02:23:24.623 INFO:teuthology.orchestra.run.vm08.stdout:(57/152): ceph-mgr-diskprediction-local-20.2.0- 2.0 MB/s | 7.4 MB 00:03 2026-04-01T02:23:24.630 INFO:teuthology.orchestra.run.vm08.stdout:(58/152): python3-grpcio-1.46.7-10.el9.x86_64.r 20 MB/s | 2.0 MB 00:00 2026-04-01T02:23:24.635 INFO:teuthology.orchestra.run.vm08.stdout:(59/152): python3-grpcio-tools-1.46.7-10.el9.x8 2.4 MB/s | 144 kB 00:00 2026-04-01T02:23:24.639 INFO:teuthology.orchestra.run.vm06.stdout:(86/152): thrift-0.15.0-4.el9.x86_64.rpm 15 MB/s | 1.6 MB 00:00 2026-04-01T02:23:24.685 INFO:teuthology.orchestra.run.vm08.stdout:(60/152): python3-isodate-0.6.1-3.el9.noarch.rp 1.0 MB/s | 56 kB 00:00 2026-04-01T02:23:24.685 INFO:teuthology.orchestra.run.vm03.stdout:(45/152): luarocks-3.9.2-5.el9.noarch.rpm 2.1 MB/s | 151 kB 00:00 2026-04-01T02:23:24.686 INFO:teuthology.orchestra.run.vm08.stdout:(61/152): python3-influxdb-5.3.1-1.el9.noarch.r 2.2 MB/s | 139 kB 00:00 2026-04-01T02:23:24.699 INFO:teuthology.orchestra.run.vm06.stdout:(87/152): bzip2-1.0.8-10.el9_5.x86_64.rpm 385 kB/s | 51 kB 00:00 2026-04-01T02:23:24.700 INFO:teuthology.orchestra.run.vm08.stdout:(62/152): python3-jaraco-8.2.1-3.el9.noarch.rpm 164 kB/s | 11 kB 00:00 2026-04-01T02:23:24.743 INFO:teuthology.orchestra.run.vm06.stdout:(88/152): c-ares-1.19.1-2.el9_4.x86_64.rpm 667 kB/s | 110 kB 00:00 2026-04-01T02:23:24.743 INFO:teuthology.orchestra.run.vm08.stdout:(63/152): python3-jaraco-classes-3.2.1-5.el9.no 305 kB/s | 18 kB 00:00 2026-04-01T02:23:24.744 INFO:teuthology.orchestra.run.vm08.stdout:(64/152): python3-jaraco-collections-3.0.0-8.el 401 kB/s | 23 kB 00:00 2026-04-01T02:23:24.752 INFO:teuthology.orchestra.run.vm06.stdout:(89/152): fuse-2.9.9-17.el9.x86_64.rpm 1.5 MB/s | 78 kB 00:00 2026-04-01T02:23:24.755 INFO:teuthology.orchestra.run.vm08.stdout:(65/152): python3-jaraco-context-6.0.1-3.el9.no 365 kB/s | 20 kB 00:00 2026-04-01T02:23:24.766 INFO:teuthology.orchestra.run.vm03.stdout:(46/152): parquet-libs-9.0.0-15.el9.x86_64.rpm 10 MB/s | 838 kB 00:00 2026-04-01T02:23:24.769 INFO:teuthology.orchestra.run.vm06.stdout:(90/152): ledmon-libs-1.1.0-3.el9.x86_64.rpm 1.5 MB/s | 41 kB 00:00 2026-04-01T02:23:24.779 INFO:teuthology.orchestra.run.vm06.stdout:(91/152): libconfig-1.7.2-9.el9.x86_64.rpm 2.5 MB/s | 71 kB 00:00 
2026-04-01T02:23:24.798 INFO:teuthology.orchestra.run.vm08.stdout:(66/152): python3-jaraco-functools-3.5.0-2.el9. 356 kB/s | 19 kB 00:00 2026-04-01T02:23:24.800 INFO:teuthology.orchestra.run.vm08.stdout:(67/152): python3-jaraco-text-4.0.0-2.el9.noarc 480 kB/s | 26 kB 00:00 2026-04-01T02:23:24.831 INFO:teuthology.orchestra.run.vm06.stdout:(92/152): cryptsetup-2.7.2-4.el9.x86_64.rpm 1.6 MB/s | 310 kB 00:00 2026-04-01T02:23:24.834 INFO:teuthology.orchestra.run.vm06.stdout:(93/152): libquadmath-11.5.0-11.el9.x86_64.rpm 3.3 MB/s | 184 kB 00:00 2026-04-01T02:23:24.837 INFO:teuthology.orchestra.run.vm08.stdout:(68/152): python3-kubernetes-26.1.0-3.el9.noarc 13 MB/s | 1.0 MB 00:00 2026-04-01T02:23:24.843 INFO:teuthology.orchestra.run.vm03.stdout:(47/152): python3-asyncssh-2.13.2-5.el9.noarch. 7.0 MB/s | 548 kB 00:00 2026-04-01T02:23:24.853 INFO:teuthology.orchestra.run.vm06.stdout:(94/152): libgfortran-11.5.0-11.el9.x86_64.rpm 9.3 MB/s | 794 kB 00:00 2026-04-01T02:23:24.857 INFO:teuthology.orchestra.run.vm08.stdout:(69/152): python3-more-itertools-8.12.0-2.el9.n 1.3 MB/s | 79 kB 00:00 2026-04-01T02:23:24.866 INFO:teuthology.orchestra.run.vm06.stdout:(95/152): lmdb-libs-0.9.29-3.el9.x86_64.rpm 1.7 MB/s | 60 kB 00:00 2026-04-01T02:23:24.868 INFO:teuthology.orchestra.run.vm06.stdout:(96/152): mailcap-2.1.49-5.el9.0.2.noarch.rpm 970 kB/s | 32 kB 00:00 2026-04-01T02:23:24.869 INFO:teuthology.orchestra.run.vm08.stdout:(70/152): python3-msgpack-1.0.3-2.el9.x86_64.rp 1.2 MB/s | 86 kB 00:00 2026-04-01T02:23:24.894 INFO:teuthology.orchestra.run.vm06.stdout:(97/152): pciutils-3.7.0-7.el9.x86_64.rpm 3.3 MB/s | 92 kB 00:00 2026-04-01T02:23:24.898 INFO:teuthology.orchestra.run.vm06.stdout:(98/152): python3-cffi-1.14.5-5.el9.x86_64.rpm 7.9 MB/s | 241 kB 00:00 2026-04-01T02:23:24.900 INFO:teuthology.orchestra.run.vm08.stdout:(71/152): python3-natsort-7.1.1-5.el9.noarch.rp 916 kB/s | 58 kB 00:00 2026-04-01T02:23:24.908 INFO:teuthology.orchestra.run.vm06.stdout:(99/152): nvme-cli-2.13-1.el9.x86_64.rpm 18 MB/s | 1.0 MB 00:00 2026-04-01T02:23:24.910 INFO:teuthology.orchestra.run.vm03.stdout:(48/152): python3-autocommand-2.2.2-8.el9.noarc 443 kB/s | 29 kB 00:00 2026-04-01T02:23:24.913 INFO:teuthology.orchestra.run.vm08.stdout:(72/152): python3-portend-3.1.0-2.el9.noarch.rp 294 kB/s | 16 kB 00:00 2026-04-01T02:23:24.926 INFO:teuthology.orchestra.run.vm06.stdout:(100/152): python3-ply-3.11-14.el9.0.1.noarch.r 3.6 MB/s | 103 kB 00:00 2026-04-01T02:23:24.927 INFO:teuthology.orchestra.run.vm08.stdout:(73/152): python3-pyOpenSSL-21.0.0-1.el9.noarch 1.5 MB/s | 90 kB 00:00 2026-04-01T02:23:24.937 INFO:teuthology.orchestra.run.vm06.stdout:(101/152): python3-pycparser-2.20-6.el9.noarch. 4.2 MB/s | 124 kB 00:00 2026-04-01T02:23:24.954 INFO:teuthology.orchestra.run.vm08.stdout:(74/152): python3-repoze-lru-0.7-16.el9.noarch. 572 kB/s | 31 kB 00:00 2026-04-01T02:23:24.954 INFO:teuthology.orchestra.run.vm06.stdout:(102/152): python3-pyparsing-2.4.7-9.el9.0.1.no 5.2 MB/s | 150 kB 00:00 2026-04-01T02:23:24.965 INFO:teuthology.orchestra.run.vm06.stdout:(103/152): python3-requests-2.25.1-10.el9_6.noa 4.1 MB/s | 115 kB 00:00 2026-04-01T02:23:24.972 INFO:teuthology.orchestra.run.vm08.stdout:(75/152): python3-routes-2.5.1-5.el9.noarch.rpm 3.1 MB/s | 188 kB 00:00 2026-04-01T02:23:24.977 INFO:teuthology.orchestra.run.vm06.stdout:(104/152): python3-cryptography-36.0.1-5.el9_6. 
14 MB/s | 1.2 MB 00:00 2026-04-01T02:23:24.977 INFO:teuthology.orchestra.run.vm03.stdout:(49/152): python3-backports-tarfile-1.2.0-1.el9 896 kB/s | 60 kB 00:00 2026-04-01T02:23:24.981 INFO:teuthology.orchestra.run.vm08.stdout:(76/152): python3-rsa-4.9-2.el9.noarch.rpm 1.1 MB/s | 59 kB 00:00 2026-04-01T02:23:24.983 INFO:teuthology.orchestra.run.vm06.stdout:(105/152): python3-urllib3-1.26.5-6.el9_7.1.noa 6.6 MB/s | 191 kB 00:00 2026-04-01T02:23:24.998 INFO:teuthology.orchestra.run.vm06.stdout:(106/152): smartmontools-7.2-9.el9.x86_64.rpm 17 MB/s | 551 kB 00:00 2026-04-01T02:23:25.006 INFO:teuthology.orchestra.run.vm06.stdout:(107/152): unzip-6.0-59.el9.x86_64.rpm 6.1 MB/s | 180 kB 00:00 2026-04-01T02:23:25.009 INFO:teuthology.orchestra.run.vm08.stdout:(77/152): python3-saml-1.16.0-1.el9.noarch.rpm 2.2 MB/s | 125 kB 00:00 2026-04-01T02:23:25.012 INFO:teuthology.orchestra.run.vm06.stdout:(108/152): zip-3.0-35.el9.x86_64.rpm 8.9 MB/s | 263 kB 00:00 2026-04-01T02:23:25.026 INFO:teuthology.orchestra.run.vm06.stdout:(109/152): boost-program-options-1.75.0-13.el9_ 3.7 MB/s | 104 kB 00:00 2026-04-01T02:23:25.028 INFO:teuthology.orchestra.run.vm08.stdout:(78/152): python3-tempora-5.0.0-2.el9.noarch.rp 642 kB/s | 36 kB 00:00 2026-04-01T02:23:25.033 INFO:teuthology.orchestra.run.vm06.stdout:(110/152): flexiblas-3.0.4-8.el9.0.1.x86_64.rpm 1.1 MB/s | 30 kB 00:00 2026-04-01T02:23:25.040 INFO:teuthology.orchestra.run.vm08.stdout:(79/152): python3-typing-extensions-4.15.0-1.el 1.5 MB/s | 86 kB 00:00 2026-04-01T02:23:25.045 INFO:teuthology.orchestra.run.vm03.stdout:(50/152): python3-bcrypt-3.2.2-1.el9.x86_64.rpm 646 kB/s | 43 kB 00:00 2026-04-01T02:23:25.052 INFO:teuthology.orchestra.run.vm06.stdout:(111/152): flexiblas-openblas-openmp-3.0.4-8.el 570 kB/s | 15 kB 00:00 2026-04-01T02:23:25.062 INFO:teuthology.orchestra.run.vm06.stdout:(112/152): libnbd-1.20.3-4.el9.x86_64.rpm 5.9 MB/s | 171 kB 00:00 2026-04-01T02:23:25.066 INFO:teuthology.orchestra.run.vm08.stdout:(80/152): python3-websocket-client-1.2.3-2.el9. 
1.6 MB/s | 90 kB 00:00 2026-04-01T02:23:25.081 INFO:teuthology.orchestra.run.vm06.stdout:(113/152): libpmemobj-1.12.1-1.el9.x86_64.rpm 5.5 MB/s | 159 kB 00:00 2026-04-01T02:23:25.082 INFO:teuthology.orchestra.run.vm08.stdout:(81/152): python3-xmlsec-1.3.13-1.el9.x86_64.rp 896 kB/s | 48 kB 00:00 2026-04-01T02:23:25.089 INFO:teuthology.orchestra.run.vm06.stdout:(114/152): librabbitmq-0.11.0-7.el9.x86_64.rpm 1.6 MB/s | 44 kB 00:00 2026-04-01T02:23:25.100 INFO:teuthology.orchestra.run.vm08.stdout:(82/152): python3-xmltodict-0.12.0-15.el9.noarc 369 kB/s | 22 kB 00:00 2026-04-01T02:23:25.112 INFO:teuthology.orchestra.run.vm03.stdout:(51/152): python3-cachetools-4.2.4-1.el9.noarch 484 kB/s | 32 kB 00:00 2026-04-01T02:23:25.112 INFO:teuthology.orchestra.run.vm06.stdout:(115/152): flexiblas-netlib-3.0.4-8.el9.0.1.x86 30 MB/s | 3.0 MB 00:00 2026-04-01T02:23:25.118 INFO:teuthology.orchestra.run.vm06.stdout:(116/152): libstoragemgmt-1.10.1-1.el9.x86_64.r 8.2 MB/s | 243 kB 00:00 2026-04-01T02:23:25.119 INFO:teuthology.orchestra.run.vm08.stdout:(83/152): python3-zc-lockfile-2.0-10.el9.noarch 378 kB/s | 20 kB 00:00 2026-04-01T02:23:25.126 INFO:teuthology.orchestra.run.vm06.stdout:(117/152): librdkafka-1.6.1-102.el9.x86_64.rpm 14 MB/s | 662 kB 00:00 2026-04-01T02:23:25.142 INFO:teuthology.orchestra.run.vm08.stdout:(84/152): re2-20211101-20.el9.x86_64.rpm 3.2 MB/s | 191 kB 00:00 2026-04-01T02:23:25.142 INFO:teuthology.orchestra.run.vm06.stdout:(118/152): libxslt-1.1.34-13.el9_6.x86_64.rpm 7.8 MB/s | 239 kB 00:00 2026-04-01T02:23:25.148 INFO:teuthology.orchestra.run.vm06.stdout:(119/152): lttng-ust-2.12.0-6.el9.x86_64.rpm 9.4 MB/s | 282 kB 00:00 2026-04-01T02:23:25.155 INFO:teuthology.orchestra.run.vm06.stdout:(120/152): lua-5.4.4-4.el9.x86_64.rpm 6.5 MB/s | 187 kB 00:00 2026-04-01T02:23:25.169 INFO:teuthology.orchestra.run.vm06.stdout:(121/152): openblas-0.3.29-1.el9.x86_64.rpm 1.5 MB/s | 41 kB 00:00 2026-04-01T02:23:25.174 INFO:teuthology.orchestra.run.vm08.stdout:(85/152): s3cmd-2.4.0-1.el9.noarch.rpm 2.7 MB/s | 206 kB 00:00 2026-04-01T02:23:25.179 INFO:teuthology.orchestra.run.vm03.stdout:(52/152): python3-certifi-2023.05.07-4.el9.noar 212 kB/s | 14 kB 00:00 2026-04-01T02:23:25.185 INFO:teuthology.orchestra.run.vm06.stdout:(122/152): perl-Benchmark-1.23-481.1.el9_6.noar 856 kB/s | 25 kB 00:00 2026-04-01T02:23:25.202 INFO:teuthology.orchestra.run.vm06.stdout:(123/152): perl-Test-Harness-3.42-461.el9.noarc 8.0 MB/s | 267 kB 00:00 2026-04-01T02:23:25.212 INFO:teuthology.orchestra.run.vm08.stdout:(86/152): thrift-0.15.0-4.el9.x86_64.rpm 17 MB/s | 1.6 MB 00:00 2026-04-01T02:23:25.235 INFO:teuthology.orchestra.run.vm06.stdout:(124/152): protobuf-3.14.0-17.el9_7.x86_64.rpm 20 MB/s | 1.0 MB 00:00 2026-04-01T02:23:25.254 INFO:teuthology.orchestra.run.vm03.stdout:(53/152): python3-cheroot-10.0.1-5.el9.noarch.r 2.3 MB/s | 173 kB 00:00 2026-04-01T02:23:25.296 INFO:teuthology.orchestra.run.vm08.stdout:(87/152): bzip2-1.0.8-10.el9_5.x86_64.rpm 335 kB/s | 51 kB 00:00 2026-04-01T02:23:25.296 INFO:teuthology.orchestra.run.vm06.stdout:(125/152): openblas-openmp-0.3.29-1.el9.x86_64. 
36 MB/s | 5.3 MB 00:00 2026-04-01T02:23:25.298 INFO:teuthology.orchestra.run.vm06.stdout:(126/152): python3-devel-3.9.23-2.el9.x86_64.rp 3.2 MB/s | 205 kB 00:00 2026-04-01T02:23:25.328 INFO:teuthology.orchestra.run.vm03.stdout:(54/152): python3-cherrypy-18.10.0-5.el9.noarch 3.8 MB/s | 290 kB 00:00 2026-04-01T02:23:25.330 INFO:teuthology.orchestra.run.vm06.stdout:(127/152): python3-jinja2-2.11.3-8.el9_5.noarch 6.7 MB/s | 228 kB 00:00 2026-04-01T02:23:25.331 INFO:teuthology.orchestra.run.vm06.stdout:(128/152): python3-jmespath-1.0.1-1.el9_7.noarc 1.3 MB/s | 43 kB 00:00 2026-04-01T02:23:25.348 INFO:teuthology.orchestra.run.vm08.stdout:(88/152): fuse-2.9.9-17.el9.x86_64.rpm 1.5 MB/s | 78 kB 00:00 2026-04-01T02:23:25.349 INFO:teuthology.orchestra.run.vm08.stdout:(89/152): c-ares-1.19.1-2.el9_4.x86_64.rpm 627 kB/s | 110 kB 00:00 2026-04-01T02:23:25.359 INFO:teuthology.orchestra.run.vm06.stdout:(129/152): python3-babel-2.9.1-2.el9.noarch.rpm 37 MB/s | 5.8 MB 00:00 2026-04-01T02:23:25.361 INFO:teuthology.orchestra.run.vm06.stdout:(130/152): python3-libstoragemgmt-1.10.1-1.el9. 5.3 MB/s | 166 kB 00:00 2026-04-01T02:23:25.373 INFO:teuthology.orchestra.run.vm06.stdout:(131/152): python3-lxml-4.6.5-3.el9.x86_64.rpm 29 MB/s | 1.2 MB 00:00 2026-04-01T02:23:25.375 INFO:teuthology.orchestra.run.vm08.stdout:(90/152): ledmon-libs-1.1.0-3.el9.x86_64.rpm 1.5 MB/s | 41 kB 00:00 2026-04-01T02:23:25.376 INFO:teuthology.orchestra.run.vm08.stdout:(91/152): libconfig-1.7.2-9.el9.x86_64.rpm 2.6 MB/s | 71 kB 00:00 2026-04-01T02:23:25.386 INFO:teuthology.orchestra.run.vm06.stdout:(132/152): python3-markupsafe-1.1.1-12.el9.x86_ 1.2 MB/s | 32 kB 00:00 2026-04-01T02:23:25.401 INFO:teuthology.orchestra.run.vm03.stdout:(55/152): python3-google-auth-2.45.0-1.el9.noar 3.4 MB/s | 254 kB 00:00 2026-04-01T02:23:25.410 INFO:teuthology.orchestra.run.vm06.stdout:(133/152): python3-numpy-f2py-1.23.5-2.el9_7.x8 9.7 MB/s | 368 kB 00:00 2026-04-01T02:23:25.416 INFO:teuthology.orchestra.run.vm06.stdout:(134/152): python3-packaging-20.9-5.el9.noarch. 2.2 MB/s | 69 kB 00:00 2026-04-01T02:23:25.417 INFO:teuthology.orchestra.run.vm08.stdout:(92/152): cryptsetup-2.7.2-4.el9.x86_64.rpm 1.5 MB/s | 310 kB 00:00 2026-04-01T02:23:25.433 INFO:teuthology.orchestra.run.vm08.stdout:(93/152): libquadmath-11.5.0-11.el9.x86_64.rpm 3.2 MB/s | 184 kB 00:00 2026-04-01T02:23:25.443 INFO:teuthology.orchestra.run.vm06.stdout:(135/152): python3-protobuf-3.14.0-17.el9_7.noa 7.3 MB/s | 237 kB 00:00 2026-04-01T02:23:25.445 INFO:teuthology.orchestra.run.vm06.stdout:(136/152): python3-pyasn1-0.4.8-7.el9_7.noarch. 4.4 MB/s | 132 kB 00:00 2026-04-01T02:23:25.446 INFO:teuthology.orchestra.run.vm08.stdout:(94/152): lmdb-libs-0.9.29-3.el9.x86_64.rpm 2.0 MB/s | 60 kB 00:00 2026-04-01T02:23:25.464 INFO:teuthology.orchestra.run.vm08.stdout:(95/152): mailcap-2.1.49-5.el9.0.2.noarch.rpm 1.0 MB/s | 32 kB 00:00 2026-04-01T02:23:25.467 INFO:teuthology.orchestra.run.vm08.stdout:(96/152): libgfortran-11.5.0-11.el9.x86_64.rpm 8.5 MB/s | 794 kB 00:00 2026-04-01T02:23:25.484 INFO:teuthology.orchestra.run.vm06.stdout:(137/152): python3-numpy-1.23.5-2.el9_7.x86_64. 
47 MB/s | 5.8 MB 00:00 2026-04-01T02:23:25.485 INFO:teuthology.orchestra.run.vm06.stdout:(138/152): python3-requests-oauthlib-1.3.0-12.e 1.1 MB/s | 43 kB 00:00 2026-04-01T02:23:25.487 INFO:teuthology.orchestra.run.vm06.stdout:(139/152): python3-pyasn1-modules-0.4.8-7.el9_7 4.7 MB/s | 210 kB 00:00 2026-04-01T02:23:25.492 INFO:teuthology.orchestra.run.vm08.stdout:(97/152): pciutils-3.7.0-7.el9.x86_64.rpm 3.3 MB/s | 92 kB 00:00 2026-04-01T02:23:25.496 INFO:teuthology.orchestra.run.vm08.stdout:(98/152): python3-cffi-1.14.5-5.el9.x86_64.rpm 8.0 MB/s | 241 kB 00:00 2026-04-01T02:23:25.514 INFO:teuthology.orchestra.run.vm06.stdout:(140/152): python3-toml-0.10.2-6.el9.0.1.noarch 1.5 MB/s | 44 kB 00:00 2026-04-01T02:23:25.519 INFO:teuthology.orchestra.run.vm06.stdout:(141/152): qatlib-24.09.0-1.el9.x86_64.rpm 6.7 MB/s | 221 kB 00:00 2026-04-01T02:23:25.525 INFO:teuthology.orchestra.run.vm08.stdout:(99/152): python3-ply-3.11-14.el9.0.1.noarch.rp 3.6 MB/s | 103 kB 00:00 2026-04-01T02:23:25.528 INFO:teuthology.orchestra.run.vm08.stdout:(100/152): nvme-cli-2.13-1.el9.x86_64.rpm 12 MB/s | 1.0 MB 00:00 2026-04-01T02:23:25.554 INFO:teuthology.orchestra.run.vm06.stdout:(142/152): qatlib-service-24.09.0-1.el9.x86_64. 939 kB/s | 36 kB 00:00 2026-04-01T02:23:25.555 INFO:teuthology.orchestra.run.vm06.stdout:(143/152): qatzip-libs-1.3.1-1.el9.x86_64.rpm 1.8 MB/s | 65 kB 00:00 2026-04-01T02:23:25.559 INFO:teuthology.orchestra.run.vm03.stdout:(56/152): python3-grpcio-1.46.7-10.el9.x86_64.r 13 MB/s | 2.0 MB 00:00 2026-04-01T02:23:25.560 INFO:teuthology.orchestra.run.vm08.stdout:(101/152): python3-pycparser-2.20-6.el9.noarch. 3.5 MB/s | 124 kB 00:00 2026-04-01T02:23:25.564 INFO:teuthology.orchestra.run.vm08.stdout:(102/152): python3-pyparsing-2.4.7-9.el9.0.1.no 4.1 MB/s | 150 kB 00:00 2026-04-01T02:23:25.586 INFO:teuthology.orchestra.run.vm06.stdout:(144/152): socat-1.7.4.1-8.el9.x86_64.rpm 9.0 MB/s | 299 kB 00:00 2026-04-01T02:23:25.606 INFO:teuthology.orchestra.run.vm06.stdout:(145/152): xmlsec1-1.2.29-13.el9.x86_64.rpm 3.6 MB/s | 188 kB 00:00 2026-04-01T02:23:25.608 INFO:teuthology.orchestra.run.vm08.stdout:(103/152): python3-requests-2.25.1-10.el9_6.noa 2.4 MB/s | 115 kB 00:00 2026-04-01T02:23:25.624 INFO:teuthology.orchestra.run.vm06.stdout:(146/152): xmlsec1-openssl-1.2.29-13.el9.x86_64 2.3 MB/s | 89 kB 00:00 2026-04-01T02:23:25.624 INFO:teuthology.orchestra.run.vm03.stdout:(57/152): ceph-mgr-dashboard-20.2.0-8.g05971582 3.7 MB/s | 15 MB 00:04 2026-04-01T02:23:25.624 INFO:teuthology.orchestra.run.vm08.stdout:(104/152): python3-urllib3-1.26.5-6.el9_7.1.noa 3.1 MB/s | 191 kB 00:00 2026-04-01T02:23:25.629 INFO:teuthology.orchestra.run.vm03.stdout:(58/152): python3-grpcio-tools-1.46.7-10.el9.x8 2.0 MB/s | 144 kB 00:00 2026-04-01T02:23:25.633 INFO:teuthology.orchestra.run.vm06.stdout:(147/152): xmlstarlet-1.6.1-20.el9.x86_64.rpm 2.3 MB/s | 63 kB 00:00 2026-04-01T02:23:25.645 INFO:teuthology.orchestra.run.vm08.stdout:(105/152): smartmontools-7.2-9.el9.x86_64.rpm 15 MB/s | 551 kB 00:00 2026-04-01T02:23:25.656 INFO:teuthology.orchestra.run.vm06.stdout:(148/152): lua-devel-5.4.4-4.el9.x86_64.rpm 679 kB/s | 21 kB 00:00 2026-04-01T02:23:25.656 INFO:teuthology.orchestra.run.vm08.stdout:(106/152): unzip-6.0-59.el9.x86_64.rpm 5.6 MB/s | 180 kB 00:00 2026-04-01T02:23:25.676 INFO:teuthology.orchestra.run.vm08.stdout:(107/152): python3-cryptography-36.0.1-5.el9_6. 
6.3 MB/s | 1.2 MB 00:00 2026-04-01T02:23:25.680 INFO:teuthology.orchestra.run.vm06.stdout:(149/152): protobuf-compiler-3.14.0-17.el9_7.x8 18 MB/s | 862 kB 00:00 2026-04-01T02:23:25.682 INFO:teuthology.orchestra.run.vm08.stdout:(108/152): zip-3.0-35.el9.x86_64.rpm 7.0 MB/s | 263 kB 00:00 2026-04-01T02:23:25.685 INFO:teuthology.orchestra.run.vm08.stdout:(109/152): boost-program-options-1.75.0-13.el9_ 3.6 MB/s | 104 kB 00:00 2026-04-01T02:23:25.699 INFO:teuthology.orchestra.run.vm03.stdout:(59/152): python3-isodate-0.6.1-3.el9.noarch.rp 814 kB/s | 56 kB 00:00 2026-04-01T02:23:25.703 INFO:teuthology.orchestra.run.vm08.stdout:(110/152): flexiblas-3.0.4-8.el9.0.1.x86_64.rpm 1.1 MB/s | 30 kB 00:00 2026-04-01T02:23:25.714 INFO:teuthology.orchestra.run.vm08.stdout:(111/152): flexiblas-openblas-openmp-3.0.4-8.el 520 kB/s | 15 kB 00:00 2026-04-01T02:23:25.745 INFO:teuthology.orchestra.run.vm08.stdout:(112/152): libpmemobj-1.12.1-1.el9.x86_64.rpm 5.0 MB/s | 159 kB 00:00 2026-04-01T02:23:25.746 INFO:teuthology.orchestra.run.vm08.stdout:(113/152): libnbd-1.20.3-4.el9.x86_64.rpm 3.9 MB/s | 171 kB 00:00 2026-04-01T02:23:25.765 INFO:teuthology.orchestra.run.vm03.stdout:(60/152): python3-jaraco-8.2.1-3.el9.noarch.rpm 162 kB/s | 11 kB 00:00 2026-04-01T02:23:25.778 INFO:teuthology.orchestra.run.vm08.stdout:(114/152): librabbitmq-0.11.0-7.el9.x86_64.rpm 1.3 MB/s | 44 kB 00:00 2026-04-01T02:23:25.800 INFO:teuthology.orchestra.run.vm08.stdout:(115/152): librdkafka-1.6.1-102.el9.x86_64.rpm 12 MB/s | 662 kB 00:00 2026-04-01T02:23:25.808 INFO:teuthology.orchestra.run.vm08.stdout:(116/152): flexiblas-netlib-3.0.4-8.el9.0.1.x86 24 MB/s | 3.0 MB 00:00 2026-04-01T02:23:25.817 INFO:teuthology.orchestra.run.vm08.stdout:(117/152): libstoragemgmt-1.10.1-1.el9.x86_64.r 6.1 MB/s | 243 kB 00:00 2026-04-01T02:23:25.829 INFO:teuthology.orchestra.run.vm08.stdout:(118/152): libxslt-1.1.34-13.el9_6.x86_64.rpm 8.1 MB/s | 239 kB 00:00 2026-04-01T02:23:25.833 INFO:teuthology.orchestra.run.vm03.stdout:(61/152): python3-jaraco-classes-3.2.1-5.el9.no 261 kB/s | 18 kB 00:00 2026-04-01T02:23:25.838 INFO:teuthology.orchestra.run.vm08.stdout:(119/152): lttng-ust-2.12.0-6.el9.x86_64.rpm 9.4 MB/s | 282 kB 00:00 2026-04-01T02:23:25.857 INFO:teuthology.orchestra.run.vm08.stdout:(120/152): lua-5.4.4-4.el9.x86_64.rpm 4.6 MB/s | 187 kB 00:00 2026-04-01T02:23:25.857 INFO:teuthology.orchestra.run.vm06.stdout:(150/152): python3-scipy-1.9.3-2.el9.x86_64.rpm 50 MB/s | 19 MB 00:00 2026-04-01T02:23:25.858 INFO:teuthology.orchestra.run.vm08.stdout:(121/152): openblas-0.3.29-1.el9.x86_64.rpm 1.4 MB/s | 41 kB 00:00 2026-04-01T02:23:25.894 INFO:teuthology.orchestra.run.vm08.stdout:(122/152): perl-Benchmark-1.23-481.1.el9_6.noar 693 kB/s | 25 kB 00:00 2026-04-01T02:23:25.897 INFO:teuthology.orchestra.run.vm08.stdout:(123/152): perl-Test-Harness-3.42-461.el9.noarc 6.6 MB/s | 267 kB 00:00 2026-04-01T02:23:25.900 INFO:teuthology.orchestra.run.vm03.stdout:(62/152): python3-jaraco-collections-3.0.0-8.el 347 kB/s | 23 kB 00:00 2026-04-01T02:23:26.010 INFO:teuthology.orchestra.run.vm08.stdout:(124/152): protobuf-3.14.0-17.el9_7.x86_64.rpm 8.7 MB/s | 1.0 MB 00:00 2026-04-01T02:23:26.021 INFO:teuthology.orchestra.run.vm03.stdout:(63/152): python3-influxdb-5.3.1-1.el9.noarch.r 351 kB/s | 139 kB 00:00 2026-04-01T02:23:26.038 INFO:teuthology.orchestra.run.vm08.stdout:(125/152): openblas-openmp-0.3.29-1.el9.x86_64. 
26 MB/s | 5.3 MB 00:00 2026-04-01T02:23:26.040 INFO:teuthology.orchestra.run.vm08.stdout:(126/152): python3-devel-3.9.23-2.el9.x86_64.rp 6.7 MB/s | 205 kB 00:00 2026-04-01T02:23:26.068 INFO:teuthology.orchestra.run.vm08.stdout:(127/152): python3-jmespath-1.0.1-1.el9_7.noarc 1.5 MB/s | 43 kB 00:00 2026-04-01T02:23:26.070 INFO:teuthology.orchestra.run.vm08.stdout:(128/152): python3-jinja2-2.11.3-8.el9_5.noarch 7.0 MB/s | 228 kB 00:00 2026-04-01T02:23:26.085 INFO:teuthology.orchestra.run.vm08.stdout:(129/152): python3-babel-2.9.1-2.el9.noarch.rpm 31 MB/s | 5.8 MB 00:00 2026-04-01T02:23:26.087 INFO:teuthology.orchestra.run.vm03.stdout:(64/152): python3-jaraco-functools-3.5.0-2.el9. 296 kB/s | 19 kB 00:00 2026-04-01T02:23:26.098 INFO:teuthology.orchestra.run.vm08.stdout:(130/152): python3-libstoragemgmt-1.10.1-1.el9. 5.5 MB/s | 166 kB 00:00 2026-04-01T02:23:26.112 INFO:teuthology.orchestra.run.vm08.stdout:(131/152): python3-markupsafe-1.1.1-12.el9.x86_ 1.2 MB/s | 32 kB 00:00 2026-04-01T02:23:26.130 INFO:teuthology.orchestra.run.vm06.stdout:(151/152): librbd1-20.2.0-8.g0597158282e.el9.cl 6.3 MB/s | 2.8 MB 00:00 2026-04-01T02:23:26.132 INFO:teuthology.orchestra.run.vm08.stdout:(132/152): python3-lxml-4.6.5-3.el9.x86_64.rpm 19 MB/s | 1.2 MB 00:00 2026-04-01T02:23:26.143 INFO:teuthology.orchestra.run.vm08.stdout:(133/152): python3-numpy-f2py-1.23.5-2.el9_7.x8 12 MB/s | 368 kB 00:00 2026-04-01T02:23:26.153 INFO:teuthology.orchestra.run.vm03.stdout:(65/152): python3-jaraco-text-4.0.0-2.el9.noarc 399 kB/s | 26 kB 00:00 2026-04-01T02:23:26.161 INFO:teuthology.orchestra.run.vm08.stdout:(134/152): python3-packaging-20.9-5.el9.noarch. 2.4 MB/s | 69 kB 00:00 2026-04-01T02:23:26.172 INFO:teuthology.orchestra.run.vm08.stdout:(135/152): python3-protobuf-3.14.0-17.el9_7.noa 8.0 MB/s | 237 kB 00:00 2026-04-01T02:23:26.189 INFO:teuthology.orchestra.run.vm08.stdout:(136/152): python3-pyasn1-0.4.8-7.el9_7.noarch. 4.6 MB/s | 132 kB 00:00 2026-04-01T02:23:26.201 INFO:teuthology.orchestra.run.vm08.stdout:(137/152): python3-pyasn1-modules-0.4.8-7.el9_7 7.2 MB/s | 210 kB 00:00 2026-04-01T02:23:26.216 INFO:teuthology.orchestra.run.vm08.stdout:(138/152): python3-requests-oauthlib-1.3.0-12.e 1.6 MB/s | 43 kB 00:00 2026-04-01T02:23:26.243 INFO:teuthology.orchestra.run.vm08.stdout:(139/152): python3-toml-0.10.2-6.el9.0.1.noarch 1.6 MB/s | 44 kB 00:00 2026-04-01T02:23:26.271 INFO:teuthology.orchestra.run.vm08.stdout:(140/152): qatlib-24.09.0-1.el9.x86_64.rpm 7.6 MB/s | 221 kB 00:00 2026-04-01T02:23:26.298 INFO:teuthology.orchestra.run.vm08.stdout:(141/152): qatlib-service-24.09.0-1.el9.x86_64. 1.3 MB/s | 36 kB 00:00 2026-04-01T02:23:26.325 INFO:teuthology.orchestra.run.vm08.stdout:(142/152): qatzip-libs-1.3.1-1.el9.x86_64.rpm 2.4 MB/s | 65 kB 00:00 2026-04-01T02:23:26.339 INFO:teuthology.orchestra.run.vm06.stdout:(152/152): librados2-20.2.0-8.g0597158282e.el9. 
5.2 MB/s | 3.5 MB 00:00 2026-04-01T02:23:26.344 INFO:teuthology.orchestra.run.vm06.stdout:-------------------------------------------------------------------------------- 2026-04-01T02:23:26.345 INFO:teuthology.orchestra.run.vm06.stdout:Total 26 MB/s | 274 MB 00:10 2026-04-01T02:23:26.360 INFO:teuthology.orchestra.run.vm08.stdout:(143/152): socat-1.7.4.1-8.el9.x86_64.rpm 8.6 MB/s | 299 kB 00:00 2026-04-01T02:23:26.361 INFO:teuthology.orchestra.run.vm03.stdout:(66/152): python3-kubernetes-26.1.0-3.el9.noarc 4.9 MB/s | 1.0 MB 00:00 2026-04-01T02:23:26.395 INFO:teuthology.orchestra.run.vm08.stdout:(144/152): xmlsec1-1.2.29-13.el9.x86_64.rpm 5.2 MB/s | 188 kB 00:00 2026-04-01T02:23:26.425 INFO:teuthology.orchestra.run.vm08.stdout:(145/152): xmlsec1-openssl-1.2.29-13.el9.x86_64 3.0 MB/s | 89 kB 00:00 2026-04-01T02:23:26.426 INFO:teuthology.orchestra.run.vm03.stdout:(67/152): python3-jaraco-context-6.0.1-3.el9.no 37 kB/s | 20 kB 00:00 2026-04-01T02:23:26.433 INFO:teuthology.orchestra.run.vm03.stdout:(68/152): python3-more-itertools-8.12.0-2.el9.n 1.1 MB/s | 79 kB 00:00 2026-04-01T02:23:26.453 INFO:teuthology.orchestra.run.vm08.stdout:(146/152): xmlstarlet-1.6.1-20.el9.x86_64.rpm 2.2 MB/s | 63 kB 00:00 2026-04-01T02:23:26.478 INFO:teuthology.orchestra.run.vm08.stdout:(147/152): python3-numpy-1.23.5-2.el9_7.x86_64. 15 MB/s | 5.8 MB 00:00 2026-04-01T02:23:26.481 INFO:teuthology.orchestra.run.vm08.stdout:(148/152): lua-devel-5.4.4-4.el9.x86_64.rpm 768 kB/s | 21 kB 00:00 2026-04-01T02:23:26.496 INFO:teuthology.orchestra.run.vm03.stdout:(69/152): python3-msgpack-1.0.3-2.el9.x86_64.rp 1.2 MB/s | 86 kB 00:00 2026-04-01T02:23:26.499 INFO:teuthology.orchestra.run.vm03.stdout:(70/152): python3-natsort-7.1.1-5.el9.noarch.rp 873 kB/s | 58 kB 00:00 2026-04-01T02:23:26.537 INFO:teuthology.orchestra.run.vm08.stdout:(149/152): protobuf-compiler-3.14.0-17.el9_7.x8 14 MB/s | 862 kB 00:00 2026-04-01T02:23:26.562 INFO:teuthology.orchestra.run.vm03.stdout:(71/152): python3-portend-3.1.0-2.el9.noarch.rp 249 kB/s | 16 kB 00:00 2026-04-01T02:23:26.566 INFO:teuthology.orchestra.run.vm03.stdout:(72/152): python3-pyOpenSSL-21.0.0-1.el9.noarch 1.3 MB/s | 90 kB 00:00 2026-04-01T02:23:26.631 INFO:teuthology.orchestra.run.vm03.stdout:(73/152): python3-repoze-lru-0.7-16.el9.noarch. 449 kB/s | 31 kB 00:00 2026-04-01T02:23:26.636 INFO:teuthology.orchestra.run.vm03.stdout:(74/152): python3-routes-2.5.1-5.el9.noarch.rpm 2.7 MB/s | 188 kB 00:00 2026-04-01T02:23:26.698 INFO:teuthology.orchestra.run.vm03.stdout:(75/152): python3-rsa-4.9-2.el9.noarch.rpm 882 kB/s | 59 kB 00:00 2026-04-01T02:23:26.704 INFO:teuthology.orchestra.run.vm03.stdout:(76/152): python3-saml-1.16.0-1.el9.noarch.rpm 1.8 MB/s | 125 kB 00:00 2026-04-01T02:23:26.757 INFO:teuthology.orchestra.run.vm08.stdout:(150/152): python3-scipy-1.9.3-2.el9.x86_64.rpm 34 MB/s | 19 MB 00:00 2026-04-01T02:23:26.765 INFO:teuthology.orchestra.run.vm03.stdout:(77/152): python3-tempora-5.0.0-2.el9.noarch.rp 542 kB/s | 36 kB 00:00 2026-04-01T02:23:26.775 INFO:teuthology.orchestra.run.vm03.stdout:(78/152): python3-typing-extensions-4.15.0-1.el 1.2 MB/s | 86 kB 00:00 2026-04-01T02:23:26.835 INFO:teuthology.orchestra.run.vm03.stdout:(79/152): python3-websocket-client-1.2.3-2.el9. 
1.2 MB/s | 90 kB 00:00 2026-04-01T02:23:26.842 INFO:teuthology.orchestra.run.vm03.stdout:(80/152): python3-xmlsec-1.3.13-1.el9.x86_64.rp 722 kB/s | 48 kB 00:00 2026-04-01T02:23:26.902 INFO:teuthology.orchestra.run.vm03.stdout:(81/152): python3-xmltodict-0.12.0-15.el9.noarc 334 kB/s | 22 kB 00:00 2026-04-01T02:23:26.908 INFO:teuthology.orchestra.run.vm03.stdout:(82/152): python3-zc-lockfile-2.0-10.el9.noarch 305 kB/s | 20 kB 00:00 2026-04-01T02:23:26.973 INFO:teuthology.orchestra.run.vm03.stdout:(83/152): re2-20211101-20.el9.x86_64.rpm 2.6 MB/s | 191 kB 00:00 2026-04-01T02:23:26.978 INFO:teuthology.orchestra.run.vm03.stdout:(84/152): s3cmd-2.4.0-1.el9.noarch.rpm 2.9 MB/s | 206 kB 00:00 2026-04-01T02:23:27.068 INFO:teuthology.orchestra.run.vm08.stdout:(151/152): librados2-20.2.0-8.g0597158282e.el9. 6.0 MB/s | 3.5 MB 00:00 2026-04-01T02:23:27.114 INFO:teuthology.orchestra.run.vm03.stdout:(85/152): bzip2-1.0.8-10.el9_5.x86_64.rpm 381 kB/s | 51 kB 00:00 2026-04-01T02:23:27.140 INFO:teuthology.orchestra.run.vm06.stdout:Running transaction check 2026-04-01T02:23:27.168 INFO:teuthology.orchestra.run.vm03.stdout:(86/152): c-ares-1.19.1-2.el9_4.x86_64.rpm 2.0 MB/s | 110 kB 00:00 2026-04-01T02:23:27.180 INFO:teuthology.orchestra.run.vm03.stdout:(87/152): thrift-0.15.0-4.el9.x86_64.rpm 7.7 MB/s | 1.6 MB 00:00 2026-04-01T02:23:27.207 INFO:teuthology.orchestra.run.vm06.stdout:Transaction check succeeded. 2026-04-01T02:23:27.207 INFO:teuthology.orchestra.run.vm06.stdout:Running transaction test 2026-04-01T02:23:27.221 INFO:teuthology.orchestra.run.vm03.stdout:(88/152): cryptsetup-2.7.2-4.el9.x86_64.rpm 5.7 MB/s | 310 kB 00:00 2026-04-01T02:23:27.248 INFO:teuthology.orchestra.run.vm03.stdout:(89/152): ledmon-libs-1.1.0-3.el9.x86_64.rpm 1.5 MB/s | 41 kB 00:00 2026-04-01T02:23:27.275 INFO:teuthology.orchestra.run.vm03.stdout:(90/152): libconfig-1.7.2-9.el9.x86_64.rpm 2.6 MB/s | 71 kB 00:00 2026-04-01T02:23:27.297 INFO:teuthology.orchestra.run.vm08.stdout:(152/152): librbd1-20.2.0-8.g0597158282e.el9.cl 3.7 MB/s | 2.8 MB 00:00 2026-04-01T02:23:27.303 INFO:teuthology.orchestra.run.vm08.stdout:-------------------------------------------------------------------------------- 2026-04-01T02:23:27.304 INFO:teuthology.orchestra.run.vm08.stdout:Total 23 MB/s | 274 MB 00:12 2026-04-01T02:23:27.314 INFO:teuthology.orchestra.run.vm03.stdout:(91/152): fuse-2.9.9-17.el9.x86_64.rpm 580 kB/s | 78 kB 00:00 2026-04-01T02:23:27.332 INFO:teuthology.orchestra.run.vm03.stdout:(92/152): libgfortran-11.5.0-11.el9.x86_64.rpm 14 MB/s | 794 kB 00:00 2026-04-01T02:23:27.359 INFO:teuthology.orchestra.run.vm03.stdout:(93/152): lmdb-libs-0.9.29-3.el9.x86_64.rpm 2.2 MB/s | 60 kB 00:00 2026-04-01T02:23:27.367 INFO:teuthology.orchestra.run.vm03.stdout:(94/152): libquadmath-11.5.0-11.el9.x86_64.rpm 3.4 MB/s | 184 kB 00:00 2026-04-01T02:23:27.386 INFO:teuthology.orchestra.run.vm03.stdout:(95/152): mailcap-2.1.49-5.el9.0.2.noarch.rpm 1.2 MB/s | 32 kB 00:00 2026-04-01T02:23:27.413 INFO:teuthology.orchestra.run.vm03.stdout:(96/152): pciutils-3.7.0-7.el9.x86_64.rpm 3.3 MB/s | 92 kB 00:00 2026-04-01T02:23:27.443 INFO:teuthology.orchestra.run.vm03.stdout:(97/152): python3-cffi-1.14.5-5.el9.x86_64.rpm 8.0 MB/s | 241 kB 00:00 2026-04-01T02:23:27.449 INFO:teuthology.orchestra.run.vm03.stdout:(98/152): nvme-cli-2.13-1.el9.x86_64.rpm 12 MB/s | 1.0 MB 00:00 2026-04-01T02:23:27.479 INFO:teuthology.orchestra.run.vm03.stdout:(99/152): python3-ply-3.11-14.el9.0.1.noarch.rp 3.4 MB/s | 103 kB 00:00 2026-04-01T02:23:27.489 
INFO:teuthology.orchestra.run.vm03.stdout:(100/152): python3-cryptography-36.0.1-5.el9_6. 26 MB/s | 1.2 MB 00:00 2026-04-01T02:23:27.506 INFO:teuthology.orchestra.run.vm03.stdout:(101/152): python3-pycparser-2.20-6.el9.noarch. 4.4 MB/s | 124 kB 00:00 2026-04-01T02:23:27.518 INFO:teuthology.orchestra.run.vm03.stdout:(102/152): python3-pyparsing-2.4.7-9.el9.0.1.no 5.1 MB/s | 150 kB 00:00 2026-04-01T02:23:27.534 INFO:teuthology.orchestra.run.vm03.stdout:(103/152): python3-requests-2.25.1-10.el9_6.noa 4.1 MB/s | 115 kB 00:00 2026-04-01T02:23:27.547 INFO:teuthology.orchestra.run.vm03.stdout:(104/152): python3-urllib3-1.26.5-6.el9_7.1.noa 6.4 MB/s | 191 kB 00:00 2026-04-01T02:23:27.566 INFO:teuthology.orchestra.run.vm03.stdout:(105/152): smartmontools-7.2-9.el9.x86_64.rpm 17 MB/s | 551 kB 00:00 2026-04-01T02:23:27.576 INFO:teuthology.orchestra.run.vm03.stdout:(106/152): unzip-6.0-59.el9.x86_64.rpm 6.2 MB/s | 180 kB 00:00 2026-04-01T02:23:27.596 INFO:teuthology.orchestra.run.vm03.stdout:(107/152): zip-3.0-35.el9.x86_64.rpm 8.6 MB/s | 263 kB 00:00 2026-04-01T02:23:27.604 INFO:teuthology.orchestra.run.vm03.stdout:(108/152): boost-program-options-1.75.0-13.el9_ 3.7 MB/s | 104 kB 00:00 2026-04-01T02:23:27.623 INFO:teuthology.orchestra.run.vm03.stdout:(109/152): flexiblas-3.0.4-8.el9.0.1.x86_64.rpm 1.1 MB/s | 30 kB 00:00 2026-04-01T02:23:27.650 INFO:teuthology.orchestra.run.vm03.stdout:(110/152): flexiblas-openblas-openmp-3.0.4-8.el 572 kB/s | 15 kB 00:00 2026-04-01T02:23:27.673 INFO:teuthology.orchestra.run.vm03.stdout:(111/152): flexiblas-netlib-3.0.4-8.el9.0.1.x86 44 MB/s | 3.0 MB 00:00 2026-04-01T02:23:27.678 INFO:teuthology.orchestra.run.vm03.stdout:(112/152): libnbd-1.20.3-4.el9.x86_64.rpm 5.9 MB/s | 171 kB 00:00 2026-04-01T02:23:27.701 INFO:teuthology.orchestra.run.vm03.stdout:(113/152): libpmemobj-1.12.1-1.el9.x86_64.rpm 5.6 MB/s | 159 kB 00:00 2026-04-01T02:23:27.705 INFO:teuthology.orchestra.run.vm03.stdout:(114/152): librabbitmq-0.11.0-7.el9.x86_64.rpm 1.6 MB/s | 44 kB 00:00 2026-04-01T02:23:27.737 INFO:teuthology.orchestra.run.vm03.stdout:(115/152): librdkafka-1.6.1-102.el9.x86_64.rpm 18 MB/s | 662 kB 00:00 2026-04-01T02:23:27.738 INFO:teuthology.orchestra.run.vm03.stdout:(116/152): libstoragemgmt-1.10.1-1.el9.x86_64.r 7.2 MB/s | 243 kB 00:00 2026-04-01T02:23:27.766 INFO:teuthology.orchestra.run.vm03.stdout:(117/152): libxslt-1.1.34-13.el9_6.x86_64.rpm 8.0 MB/s | 239 kB 00:00 2026-04-01T02:23:27.770 INFO:teuthology.orchestra.run.vm03.stdout:(118/152): lttng-ust-2.12.0-6.el9.x86_64.rpm 8.9 MB/s | 282 kB 00:00 2026-04-01T02:23:27.795 INFO:teuthology.orchestra.run.vm03.stdout:(119/152): lua-5.4.4-4.el9.x86_64.rpm 6.5 MB/s | 187 kB 00:00 2026-04-01T02:23:27.797 INFO:teuthology.orchestra.run.vm03.stdout:(120/152): openblas-0.3.29-1.el9.x86_64.rpm 1.5 MB/s | 41 kB 00:00 2026-04-01T02:23:27.827 INFO:teuthology.orchestra.run.vm03.stdout:(121/152): perl-Benchmark-1.23-481.1.el9_6.noar 858 kB/s | 25 kB 00:00 2026-04-01T02:23:27.858 INFO:teuthology.orchestra.run.vm03.stdout:(122/152): perl-Test-Harness-3.42-461.el9.noarc 8.6 MB/s | 267 kB 00:00 2026-04-01T02:23:27.912 INFO:teuthology.orchestra.run.vm03.stdout:(123/152): openblas-openmp-0.3.29-1.el9.x86_64. 
45 MB/s | 5.3 MB 00:00 2026-04-01T02:23:27.917 INFO:teuthology.orchestra.run.vm03.stdout:(124/152): protobuf-3.14.0-17.el9_7.x86_64.rpm 17 MB/s | 1.0 MB 00:00 2026-04-01T02:23:27.951 INFO:teuthology.orchestra.run.vm03.stdout:(125/152): python3-devel-3.9.23-2.el9.x86_64.rp 5.9 MB/s | 205 kB 00:00 2026-04-01T02:23:27.984 INFO:teuthology.orchestra.run.vm03.stdout:(126/152): python3-jinja2-2.11.3-8.el9_5.noarch 6.8 MB/s | 228 kB 00:00 2026-04-01T02:23:28.016 INFO:teuthology.orchestra.run.vm03.stdout:(127/152): python3-jmespath-1.0.1-1.el9_7.noarc 1.4 MB/s | 43 kB 00:00 2026-04-01T02:23:28.053 INFO:teuthology.orchestra.run.vm03.stdout:(128/152): python3-babel-2.9.1-2.el9.noarch.rpm 41 MB/s | 5.8 MB 00:00 2026-04-01T02:23:28.055 INFO:teuthology.orchestra.run.vm03.stdout:(129/152): python3-libstoragemgmt-1.10.1-1.el9. 4.2 MB/s | 166 kB 00:00 2026-04-01T02:23:28.087 INFO:teuthology.orchestra.run.vm03.stdout:(130/152): python3-markupsafe-1.1.1-12.el9.x86_ 990 kB/s | 32 kB 00:00 2026-04-01T02:23:28.104 INFO:teuthology.orchestra.run.vm03.stdout:(131/152): python3-lxml-4.6.5-3.el9.x86_64.rpm 24 MB/s | 1.2 MB 00:00 2026-04-01T02:23:28.148 INFO:teuthology.orchestra.run.vm03.stdout:(132/152): python3-numpy-f2py-1.23.5-2.el9_7.x8 8.2 MB/s | 368 kB 00:00 2026-04-01T02:23:28.172 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction check 2026-04-01T02:23:28.181 INFO:teuthology.orchestra.run.vm03.stdout:(133/152): python3-packaging-20.9-5.el9.noarch. 2.0 MB/s | 69 kB 00:00 2026-04-01T02:23:28.220 INFO:teuthology.orchestra.run.vm03.stdout:(134/152): python3-protobuf-3.14.0-17.el9_7.noa 6.1 MB/s | 237 kB 00:00 2026-04-01T02:23:28.238 INFO:teuthology.orchestra.run.vm08.stdout:Transaction check succeeded. 2026-04-01T02:23:28.238 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction test 2026-04-01T02:23:28.247 INFO:teuthology.orchestra.run.vm03.stdout:(135/152): python3-numpy-1.23.5-2.el9_7.x86_64. 36 MB/s | 5.8 MB 00:00 2026-04-01T02:23:28.249 INFO:teuthology.orchestra.run.vm03.stdout:(136/152): python3-pyasn1-0.4.8-7.el9_7.noarch. 4.4 MB/s | 132 kB 00:00 2026-04-01T02:23:28.277 INFO:teuthology.orchestra.run.vm03.stdout:(137/152): python3-pyasn1-modules-0.4.8-7.el9_7 7.1 MB/s | 210 kB 00:00 2026-04-01T02:23:28.278 INFO:teuthology.orchestra.run.vm03.stdout:(138/152): python3-requests-oauthlib-1.3.0-12.e 1.5 MB/s | 43 kB 00:00 2026-04-01T02:23:28.308 INFO:teuthology.orchestra.run.vm03.stdout:(139/152): python3-toml-0.10.2-6.el9.0.1.noarch 1.4 MB/s | 44 kB 00:00 2026-04-01T02:23:28.340 INFO:teuthology.orchestra.run.vm03.stdout:(140/152): qatlib-24.09.0-1.el9.x86_64.rpm 6.8 MB/s | 221 kB 00:00 2026-04-01T02:23:28.370 INFO:teuthology.orchestra.run.vm03.stdout:(141/152): qatlib-service-24.09.0-1.el9.x86_64. 1.2 MB/s | 36 kB 00:00 2026-04-01T02:23:28.387 INFO:teuthology.orchestra.run.vm06.stdout:Transaction test succeeded. 
2026-04-01T02:23:28.387 INFO:teuthology.orchestra.run.vm06.stdout:Running transaction 2026-04-01T02:23:28.405 INFO:teuthology.orchestra.run.vm03.stdout:(142/152): qatzip-libs-1.3.1-1.el9.x86_64.rpm 1.8 MB/s | 65 kB 00:00 2026-04-01T02:23:28.439 INFO:teuthology.orchestra.run.vm03.stdout:(143/152): socat-1.7.4.1-8.el9.x86_64.rpm 8.9 MB/s | 299 kB 00:00 2026-04-01T02:23:28.469 INFO:teuthology.orchestra.run.vm03.stdout:(144/152): xmlsec1-1.2.29-13.el9.x86_64.rpm 6.1 MB/s | 188 kB 00:00 2026-04-01T02:23:28.500 INFO:teuthology.orchestra.run.vm03.stdout:(145/152): xmlsec1-openssl-1.2.29-13.el9.x86_64 2.9 MB/s | 89 kB 00:00 2026-04-01T02:23:28.543 INFO:teuthology.orchestra.run.vm03.stdout:(146/152): xmlstarlet-1.6.1-20.el9.x86_64.rpm 1.4 MB/s | 63 kB 00:00 2026-04-01T02:23:28.573 INFO:teuthology.orchestra.run.vm03.stdout:(147/152): lua-devel-5.4.4-4.el9.x86_64.rpm 733 kB/s | 21 kB 00:00 2026-04-01T02:23:28.638 INFO:teuthology.orchestra.run.vm03.stdout:(148/152): python3-scipy-1.9.3-2.el9.x86_64.rpm 52 MB/s | 19 MB 00:00 2026-04-01T02:23:28.645 INFO:teuthology.orchestra.run.vm03.stdout:(149/152): protobuf-compiler-3.14.0-17.el9_7.x8 12 MB/s | 862 kB 00:00 2026-04-01T02:23:29.017 INFO:teuthology.orchestra.run.vm03.stdout:(150/152): librbd1-20.2.0-8.g0597158282e.el9.cl 7.7 MB/s | 2.8 MB 00:00 2026-04-01T02:23:29.364 INFO:teuthology.orchestra.run.vm08.stdout:Transaction test succeeded. 2026-04-01T02:23:29.364 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction 2026-04-01T02:23:29.622 INFO:teuthology.orchestra.run.vm06.stdout: Preparing : 1/1 2026-04-01T02:23:29.631 INFO:teuthology.orchestra.run.vm06.stdout: Installing : thrift-0.15.0-4.el9.x86_64 1/154 2026-04-01T02:23:29.645 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-more-itertools-8.12.0-2.el9.noarch 2/154 2026-04-01T02:23:29.653 INFO:teuthology.orchestra.run.vm03.stdout:(151/152): librados2-20.2.0-8.g0597158282e.el9. 3.5 MB/s | 3.5 MB 00:01 2026-04-01T02:23:29.832 INFO:teuthology.orchestra.run.vm06.stdout: Installing : lttng-ust-2.12.0-6.el9.x86_64 3/154 2026-04-01T02:23:29.835 INFO:teuthology.orchestra.run.vm06.stdout: Upgrading : librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:29.900 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:29.903 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 5/154 2026-04-01T02:23:29.920 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 5/154 2026-04-01T02:23:29.924 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.cly 6/154 2026-04-01T02:23:29.925 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 7/154 2026-04-01T02:23:29.957 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 7/154 2026-04-01T02:23:29.966 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-rados-2:20.2.0-8.g0597158282e.el9.clyso. 
8/154 2026-04-01T02:23:29.981 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libxslt-1.1.34-13.el9_6.x86_64 9/154 2026-04-01T02:23:29.985 INFO:teuthology.orchestra.run.vm06.stdout: Installing : librdkafka-1.6.1-102.el9.x86_64 10/154 2026-04-01T02:23:29.991 INFO:teuthology.orchestra.run.vm06.stdout: Installing : librabbitmq-0.11.0-7.el9.x86_64 11/154 2026-04-01T02:23:29.995 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libpmemobj-1.12.1-1.el9.x86_64 12/154 2026-04-01T02:23:29.999 INFO:teuthology.orchestra.run.vm06.stdout: Installing : lmdb-libs-0.9.29-3.el9.x86_64 13/154 2026-04-01T02:23:30.004 INFO:teuthology.orchestra.run.vm06.stdout: Installing : liboath-2.6.12-1.el9.x86_64 14/154 2026-04-01T02:23:30.157 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libnbd-1.20.3-4.el9.x86_64 15/154 2026-04-01T02:23:30.160 INFO:teuthology.orchestra.run.vm06.stdout: Upgrading : librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 16/154 2026-04-01T02:23:30.177 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 16/154 2026-04-01T02:23:30.222 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jaraco-8.2.1-3.el9.noarch 17/154 2026-04-01T02:23:30.233 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-lxml-4.6.5-3.el9.x86_64 18/154 2026-04-01T02:23:30.245 INFO:teuthology.orchestra.run.vm06.stdout: Installing : xmlsec1-1.2.29-13.el9.x86_64 19/154 2026-04-01T02:23:30.246 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 20/154 2026-04-01T02:23:30.278 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 20/154 2026-04-01T02:23:30.280 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 21/154 2026-04-01T02:23:30.331 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 21/154 2026-04-01T02:23:30.347 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-pyasn1-0.4.8-7.el9_7.noarch 22/154 2026-04-01T02:23:30.356 INFO:teuthology.orchestra.run.vm06.stdout: Installing : protobuf-3.14.0-17.el9_7.x86_64 23/154 2026-04-01T02:23:30.360 INFO:teuthology.orchestra.run.vm06.stdout: Installing : lua-5.4.4-4.el9.x86_64 24/154 2026-04-01T02:23:30.367 INFO:teuthology.orchestra.run.vm06.stdout: Installing : flexiblas-3.0.4-8.el9.0.1.x86_64 25/154 2026-04-01T02:23:30.397 INFO:teuthology.orchestra.run.vm06.stdout: Installing : unzip-6.0-59.el9.x86_64 26/154 2026-04-01T02:23:30.423 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-urllib3-1.26.5-6.el9_7.1.noarch 27/154 2026-04-01T02:23:30.430 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-requests-2.25.1-10.el9_6.noarch 28/154 2026-04-01T02:23:30.441 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libquadmath-11.5.0-11.el9.x86_64 29/154 2026-04-01T02:23:30.444 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libgfortran-11.5.0-11.el9.x86_64 30/154 2026-04-01T02:23:30.449 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ledmon-libs-1.1.0-3.el9.x86_64 31/154 2026-04-01T02:23:30.488 INFO:teuthology.orchestra.run.vm06.stdout: Installing : re2-1:20211101-20.el9.x86_64 32/154 2026-04-01T02:23:30.531 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libarrow-9.0.0-15.el9.x86_64 33/154 2026-04-01T02:23:30.540 INFO:teuthology.orchestra.run.vm06.stdout: Installing : 
python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 34/154 2026-04-01T02:23:30.552 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-ceph-argparse-2:20.2.0-8.g0597158282e.el 35/154 2026-04-01T02:23:30.567 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso 36/154 2026-04-01T02:23:30.574 INFO:teuthology.orchestra.run.vm08.stdout: Preparing : 1/1 2026-04-01T02:23:30.577 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-requests-oauthlib-1.3.0-12.el9.noarch 37/154 2026-04-01T02:23:30.583 INFO:teuthology.orchestra.run.vm08.stdout: Installing : thrift-0.15.0-4.el9.x86_64 1/154 2026-04-01T02:23:30.597 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-more-itertools-8.12.0-2.el9.noarch 2/154 2026-04-01T02:23:30.612 INFO:teuthology.orchestra.run.vm06.stdout: Installing : zip-3.0-35.el9.x86_64 38/154 2026-04-01T02:23:30.618 INFO:teuthology.orchestra.run.vm06.stdout: Installing : luarocks-3.9.2-5.el9.noarch 39/154 2026-04-01T02:23:30.626 INFO:teuthology.orchestra.run.vm06.stdout: Installing : lua-devel-5.4.4-4.el9.x86_64 40/154 2026-04-01T02:23:30.643 INFO:teuthology.orchestra.run.vm06.stdout: Installing : protobuf-compiler-3.14.0-17.el9_7.x86_64 41/154 2026-04-01T02:23:30.711 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-rsa-4.9-2.el9.noarch 42/154 2026-04-01T02:23:30.716 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 43/154 2026-04-01T02:23:30.723 INFO:teuthology.orchestra.run.vm06.stdout: Installing : xmlsec1-openssl-1.2.29-13.el9.x86_64 44/154 2026-04-01T02:23:30.731 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-xmlsec-1.3.13-1.el9.x86_64 45/154 2026-04-01T02:23:30.750 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jaraco-classes-3.2.1-5.el9.noarch 46/154 2026-04-01T02:23:30.756 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x8 47/154 2026-04-01T02:23:30.766 INFO:teuthology.orchestra.run.vm06.stdout: Installing : xmlstarlet-1.6.1-20.el9.x86_64 48/154 2026-04-01T02:23:30.778 INFO:teuthology.orchestra.run.vm06.stdout: Installing : librados-devel-2:20.2.0-8.g0597158282e.el9.clyso 49/154 2026-04-01T02:23:30.787 INFO:teuthology.orchestra.run.vm06.stdout: Installing : socat-1.7.4.1-8.el9.x86_64 50/154 2026-04-01T02:23:30.792 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-toml-0.10.2-6.el9.0.1.noarch 51/154 2026-04-01T02:23:30.794 INFO:teuthology.orchestra.run.vm08.stdout: Installing : lttng-ust-2.12.0-6.el9.x86_64 3/154 2026-04-01T02:23:30.797 INFO:teuthology.orchestra.run.vm08.stdout: Upgrading : librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:30.802 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jaraco-functools-3.5.0-2.el9.noarch 52/154 2026-04-01T02:23:30.808 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-tempora-5.0.0-2.el9.noarch 53/154 2026-04-01T02:23:30.845 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-portend-3.1.0-2.el9.noarch 54/154 2026-04-01T02:23:30.857 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-protobuf-3.14.0-17.el9_7.noarch 55/154 2026-04-01T02:23:30.860 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:30.863 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 5/154 
2026-04-01T02:23:30.867 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-markupsafe-1.1.1-12.el9.x86_64 56/154 2026-04-01T02:23:30.881 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 5/154 2026-04-01T02:23:30.884 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.cly 6/154 2026-04-01T02:23:30.886 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 7/154 2026-04-01T02:23:30.918 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 7/154 2026-04-01T02:23:30.921 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jmespath-1.0.1-1.el9_7.noarch 57/154 2026-04-01T02:23:30.925 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-rados-2:20.2.0-8.g0597158282e.el9.clyso. 8/154 2026-04-01T02:23:30.936 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libxslt-1.1.34-13.el9_6.x86_64 9/154 2026-04-01T02:23:30.940 INFO:teuthology.orchestra.run.vm08.stdout: Installing : librdkafka-1.6.1-102.el9.x86_64 10/154 2026-04-01T02:23:30.946 INFO:teuthology.orchestra.run.vm08.stdout: Installing : librabbitmq-0.11.0-7.el9.x86_64 11/154 2026-04-01T02:23:30.951 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libpmemobj-1.12.1-1.el9.x86_64 12/154 2026-04-01T02:23:30.954 INFO:teuthology.orchestra.run.vm08.stdout: Installing : lmdb-libs-0.9.29-3.el9.x86_64 13/154 2026-04-01T02:23:30.960 INFO:teuthology.orchestra.run.vm08.stdout: Installing : liboath-2.6.12-1.el9.x86_64 14/154 2026-04-01T02:23:31.121 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libnbd-1.20.3-4.el9.x86_64 15/154 2026-04-01T02:23:31.124 INFO:teuthology.orchestra.run.vm08.stdout: Upgrading : librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 16/154 2026-04-01T02:23:31.140 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 16/154 2026-04-01T02:23:31.186 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-jaraco-8.2.1-3.el9.noarch 17/154 2026-04-01T02:23:31.196 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-lxml-4.6.5-3.el9.x86_64 18/154 2026-04-01T02:23:31.206 INFO:teuthology.orchestra.run.vm08.stdout: Installing : xmlsec1-1.2.29-13.el9.x86_64 19/154 2026-04-01T02:23:31.227 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 20/154 2026-04-01T02:23:31.237 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-devel-3.9.23-2.el9.x86_64 58/154 2026-04-01T02:23:31.290 INFO:teuthology.orchestra.run.vm03.stdout:(152/152): ceph-test-20.2.0-8.g0597158282e.el9. 6.7 MB/s | 85 MB 00:12 2026-04-01T02:23:31.296 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------------------------------------------------------- 2026-04-01T02:23:31.296 INFO:teuthology.orchestra.run.vm03.stdout:Total 17 MB/s | 274 MB 00:15 2026-04-01T02:23:31.303 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 
20/154 2026-04-01T02:23:31.305 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 21/154 2026-04-01T02:23:31.315 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-babel-2.9.1-2.el9.noarch 59/154 2026-04-01T02:23:31.321 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jinja2-2.11.3-8.el9_5.noarch 60/154 2026-04-01T02:23:31.325 INFO:teuthology.orchestra.run.vm06.stdout: Installing : perl-Benchmark-1.23-481.1.el9_6.noarch 61/154 2026-04-01T02:23:31.348 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 21/154 2026-04-01T02:23:31.363 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-pyasn1-0.4.8-7.el9_7.noarch 22/154 2026-04-01T02:23:31.371 INFO:teuthology.orchestra.run.vm08.stdout: Installing : protobuf-3.14.0-17.el9_7.x86_64 23/154 2026-04-01T02:23:31.377 INFO:teuthology.orchestra.run.vm08.stdout: Installing : lua-5.4.4-4.el9.x86_64 24/154 2026-04-01T02:23:31.383 INFO:teuthology.orchestra.run.vm08.stdout: Installing : flexiblas-3.0.4-8.el9.0.1.x86_64 25/154 2026-04-01T02:23:31.396 INFO:teuthology.orchestra.run.vm06.stdout: Installing : openblas-0.3.29-1.el9.x86_64 62/154 2026-04-01T02:23:31.400 INFO:teuthology.orchestra.run.vm06.stdout: Installing : openblas-openmp-0.3.29-1.el9.x86_64 63/154 2026-04-01T02:23:31.414 INFO:teuthology.orchestra.run.vm08.stdout: Installing : unzip-6.0-59.el9.x86_64 26/154 2026-04-01T02:23:31.427 INFO:teuthology.orchestra.run.vm06.stdout: Installing : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 64/154 2026-04-01T02:23:31.433 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-urllib3-1.26.5-6.el9_7.1.noarch 27/154 2026-04-01T02:23:31.440 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-requests-2.25.1-10.el9_6.noarch 28/154 2026-04-01T02:23:31.448 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libquadmath-11.5.0-11.el9.x86_64 29/154 2026-04-01T02:23:31.452 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libgfortran-11.5.0-11.el9.x86_64 30/154 2026-04-01T02:23:31.457 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ledmon-libs-1.1.0-3.el9.x86_64 31/154 2026-04-01T02:23:31.495 INFO:teuthology.orchestra.run.vm08.stdout: Installing : re2-1:20211101-20.el9.x86_64 32/154 2026-04-01T02:23:31.536 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libarrow-9.0.0-15.el9.x86_64 33/154 2026-04-01T02:23:31.543 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 
34/154 2026-04-01T02:23:31.554 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-ceph-argparse-2:20.2.0-8.g0597158282e.el 35/154 2026-04-01T02:23:31.571 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso 36/154 2026-04-01T02:23:31.580 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-requests-oauthlib-1.3.0-12.el9.noarch 37/154 2026-04-01T02:23:31.613 INFO:teuthology.orchestra.run.vm08.stdout: Installing : zip-3.0-35.el9.x86_64 38/154 2026-04-01T02:23:31.619 INFO:teuthology.orchestra.run.vm08.stdout: Installing : luarocks-3.9.2-5.el9.noarch 39/154 2026-04-01T02:23:31.629 INFO:teuthology.orchestra.run.vm08.stdout: Installing : lua-devel-5.4.4-4.el9.x86_64 40/154 2026-04-01T02:23:31.646 INFO:teuthology.orchestra.run.vm08.stdout: Installing : protobuf-compiler-3.14.0-17.el9_7.x86_64 41/154 2026-04-01T02:23:31.718 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-rsa-4.9-2.el9.noarch 42/154 2026-04-01T02:23:31.723 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 43/154 2026-04-01T02:23:31.730 INFO:teuthology.orchestra.run.vm08.stdout: Installing : xmlsec1-openssl-1.2.29-13.el9.x86_64 44/154 2026-04-01T02:23:31.737 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-xmlsec-1.3.13-1.el9.x86_64 45/154 2026-04-01T02:23:31.757 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-jaraco-classes-3.2.1-5.el9.noarch 46/154 2026-04-01T02:23:31.762 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x8 47/154 2026-04-01T02:23:31.775 INFO:teuthology.orchestra.run.vm08.stdout: Installing : xmlstarlet-1.6.1-20.el9.x86_64 48/154 2026-04-01T02:23:31.789 INFO:teuthology.orchestra.run.vm08.stdout: Installing : librados-devel-2:20.2.0-8.g0597158282e.el9.clyso 49/154 2026-04-01T02:23:31.797 INFO:teuthology.orchestra.run.vm08.stdout: Installing : socat-1.7.4.1-8.el9.x86_64 50/154 2026-04-01T02:23:31.803 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-toml-0.10.2-6.el9.0.1.noarch 51/154 2026-04-01T02:23:31.812 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-jaraco-functools-3.5.0-2.el9.noarch 52/154 2026-04-01T02:23:31.820 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-tempora-5.0.0-2.el9.noarch 53/154 2026-04-01T02:23:31.878 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-portend-3.1.0-2.el9.noarch 54/154 2026-04-01T02:23:31.890 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-protobuf-3.14.0-17.el9_7.noarch 55/154 2026-04-01T02:23:31.899 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-markupsafe-1.1.1-12.el9.x86_64 56/154 2026-04-01T02:23:31.902 INFO:teuthology.orchestra.run.vm06.stdout: Installing : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 65/154 2026-04-01T02:23:31.944 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-jmespath-1.0.1-1.el9_7.noarch 57/154 2026-04-01T02:23:32.004 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-numpy-1:1.23.5-2.el9_7.x86_64 66/154 2026-04-01T02:23:32.120 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction check 2026-04-01T02:23:32.179 INFO:teuthology.orchestra.run.vm03.stdout:Transaction check succeeded. 
2026-04-01T02:23:32.180 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction test 2026-04-01T02:23:32.231 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-devel-3.9.23-2.el9.x86_64 58/154 2026-04-01T02:23:32.271 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-babel-2.9.1-2.el9.noarch 59/154 2026-04-01T02:23:32.276 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-jinja2-2.11.3-8.el9_5.noarch 60/154 2026-04-01T02:23:32.280 INFO:teuthology.orchestra.run.vm08.stdout: Installing : perl-Benchmark-1.23-481.1.el9_6.noarch 61/154 2026-04-01T02:23:32.353 INFO:teuthology.orchestra.run.vm08.stdout: Installing : openblas-0.3.29-1.el9.x86_64 62/154 2026-04-01T02:23:32.356 INFO:teuthology.orchestra.run.vm08.stdout: Installing : openblas-openmp-0.3.29-1.el9.x86_64 63/154 2026-04-01T02:23:32.390 INFO:teuthology.orchestra.run.vm08.stdout: Installing : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 64/154 2026-04-01T02:23:32.822 INFO:teuthology.orchestra.run.vm08.stdout: Installing : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 65/154 2026-04-01T02:23:32.883 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 67/154 2026-04-01T02:23:32.908 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-scipy-1.9.3-2.el9.x86_64 68/154 2026-04-01T02:23:32.922 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-numpy-1:1.23.5-2.el9_7.x86_64 66/154 2026-04-01T02:23:32.925 INFO:teuthology.orchestra.run.vm06.stdout: Installing : boost-program-options-1.75.0-13.el9_7.x86_64 69/154 2026-04-01T02:23:32.928 INFO:teuthology.orchestra.run.vm06.stdout: Installing : smartmontools-1:7.2-9.el9.x86_64 70/154 2026-04-01T02:23:32.944 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: smartmontools-1:7.2-9.el9.x86_64 70/154 2026-04-01T02:23:32.944 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/smartd.service → /usr/lib/systemd/system/smartd.service. 
2026-04-01T02:23:32.944 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:32.967 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-pyparsing-2.4.7-9.el9.0.1.noarch 71/154 2026-04-01T02:23:32.978 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-packaging-20.9-5.el9.noarch 72/154 2026-04-01T02:23:32.997 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-ply-3.11-14.el9.0.1.noarch 73/154 2026-04-01T02:23:33.022 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-pycparser-2.20-6.el9.noarch 74/154 2026-04-01T02:23:33.135 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-cffi-1.14.5-5.el9.x86_64 75/154 2026-04-01T02:23:33.151 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-cryptography-36.0.1-5.el9_6.x86_64 76/154 2026-04-01T02:23:33.183 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-pyOpenSSL-21.0.0-1.el9.noarch 77/154 2026-04-01T02:23:33.193 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-cheroot-10.0.1-5.el9.noarch 78/154 2026-04-01T02:23:33.200 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-bcrypt-3.2.2-1.el9.x86_64 79/154 2026-04-01T02:23:33.203 INFO:teuthology.orchestra.run.vm06.stdout: Installing : pciutils-3.7.0-7.el9.x86_64 80/154 2026-04-01T02:23:33.240 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: qatlib-24.09.0-1.el9.x86_64 81/154 2026-04-01T02:23:33.245 INFO:teuthology.orchestra.run.vm06.stdout: Installing : qatlib-24.09.0-1.el9.x86_64 81/154 2026-04-01T02:23:33.246 INFO:teuthology.orchestra.run.vm06.stdout: Installing : qatlib-service-24.09.0-1.el9.x86_64 82/154 2026-04-01T02:23:33.268 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: qatlib-service-24.09.0-1.el9.x86_64 82/154 2026-04-01T02:23:33.279 INFO:teuthology.orchestra.run.vm03.stdout:Transaction test succeeded. 2026-04-01T02:23:33.279 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction 2026-04-01T02:23:33.443 INFO:teuthology.orchestra.run.vm06.stdout: Installing : qatzip-libs-1.3.1-1.el9.x86_64 83/154 2026-04-01T02:23:33.448 INFO:teuthology.orchestra.run.vm06.stdout: Installing : nvme-cli-2.13-1.el9.x86_64 84/154 2026-04-01T02:23:33.798 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: nvme-cli-2.13-1.el9.x86_64 84/154 2026-04-01T02:23:33.798 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/default.target.wants/nvmefc-boot-connections.service → /usr/lib/systemd/system/nvmefc-boot-connections.service. 2026-04-01T02:23:33.798 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:33.803 INFO:teuthology.orchestra.run.vm06.stdout: Installing : mailcap-2.1.49-5.el9.0.2.noarch 85/154 2026-04-01T02:23:33.809 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libconfig-1.7.2-9.el9.x86_64 86/154 2026-04-01T02:23:33.837 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:33.837 INFO:teuthology.orchestra.run.vm06.stdout:Creating group 'libstoragemgmt' with GID 992. 2026-04-01T02:23:33.837 INFO:teuthology.orchestra.run.vm06.stdout:Creating user 'libstoragemgmt' (daemon account for libstoragemgmt) with UID 992 and GID 992. 
2026-04-01T02:23:33.837 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:33.852 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 67/154 2026-04-01T02:23:33.853 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:33.875 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-scipy-1.9.3-2.el9.x86_64 68/154 2026-04-01T02:23:33.891 INFO:teuthology.orchestra.run.vm08.stdout: Installing : boost-program-options-1.75.0-13.el9_7.x86_64 69/154 2026-04-01T02:23:33.894 INFO:teuthology.orchestra.run.vm08.stdout: Installing : smartmontools-1:7.2-9.el9.x86_64 70/154 2026-04-01T02:23:33.897 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:33.897 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/libstoragemgmt.service → /usr/lib/systemd/system/libstoragemgmt.service. 2026-04-01T02:23:33.897 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:33.912 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: smartmontools-1:7.2-9.el9.x86_64 70/154 2026-04-01T02:23:33.912 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/smartd.service → /usr/lib/systemd/system/smartd.service. 2026-04-01T02:23:33.912 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:33.927 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 88/154 2026-04-01T02:23:33.932 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-pyparsing-2.4.7-9.el9.0.1.noarch 71/154 2026-04-01T02:23:33.942 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-packaging-20.9-5.el9.noarch 72/154 2026-04-01T02:23:33.958 INFO:teuthology.orchestra.run.vm06.stdout: Installing : fuse-2.9.9-17.el9.x86_64 89/154 2026-04-01T02:23:33.961 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-ply-3.11-14.el9.0.1.noarch 73/154 2026-04-01T02:23:33.966 INFO:teuthology.orchestra.run.vm06.stdout: Installing : cryptsetup-2.7.2-4.el9.x86_64 90/154 2026-04-01T02:23:33.973 INFO:teuthology.orchestra.run.vm06.stdout: Installing : c-ares-1.19.1-2.el9_4.x86_64 91/154 2026-04-01T02:23:33.982 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-zc-lockfile-2.0-10.el9.noarch 92/154 2026-04-01T02:23:33.983 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-pycparser-2.20-6.el9.noarch 74/154 2026-04-01T02:23:34.016 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-xmltodict-0.12.0-15.el9.noarch 93/154 2026-04-01T02:23:34.028 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-websocket-client-1.2.3-2.el9.noarch 94/154 2026-04-01T02:23:34.117 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-cffi-1.14.5-5.el9.x86_64 75/154 2026-04-01T02:23:34.127 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-typing-extensions-4.15.0-1.el9.noarch 95/154 2026-04-01T02:23:34.206 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-cryptography-36.0.1-5.el9_6.x86_64 76/154 2026-04-01T02:23:34.233 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-asyncssh-2.13.2-5.el9.noarch 96/154 2026-04-01T02:23:34.255 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-pyOpenSSL-21.0.0-1.el9.noarch 77/154 2026-04-01T02:23:34.314 INFO:teuthology.orchestra.run.vm08.stdout: Installing : 
python3-cheroot-10.0.1-5.el9.noarch 78/154 2026-04-01T02:23:34.317 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-repoze-lru-0.7-16.el9.noarch 97/154 2026-04-01T02:23:34.322 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-bcrypt-3.2.2-1.el9.x86_64 79/154 2026-04-01T02:23:34.326 INFO:teuthology.orchestra.run.vm08.stdout: Installing : pciutils-3.7.0-7.el9.x86_64 80/154 2026-04-01T02:23:34.330 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-routes-2.5.1-5.el9.noarch 98/154 2026-04-01T02:23:34.340 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-natsort-7.1.1-5.el9.noarch 99/154 2026-04-01T02:23:34.366 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: qatlib-24.09.0-1.el9.x86_64 81/154 2026-04-01T02:23:34.372 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-msgpack-1.0.3-2.el9.x86_64 100/154 2026-04-01T02:23:34.372 INFO:teuthology.orchestra.run.vm08.stdout: Installing : qatlib-24.09.0-1.el9.x86_64 81/154 2026-04-01T02:23:34.375 INFO:teuthology.orchestra.run.vm08.stdout: Installing : qatlib-service-24.09.0-1.el9.x86_64 82/154 2026-04-01T02:23:34.395 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-influxdb-5.3.1-1.el9.noarch 101/154 2026-04-01T02:23:34.400 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: qatlib-service-24.09.0-1.el9.x86_64 82/154 2026-04-01T02:23:34.421 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-isodate-0.6.1-3.el9.noarch 102/154 2026-04-01T02:23:34.430 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-saml-1.16.0-1.el9.noarch 103/154 2026-04-01T02:23:34.441 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-certifi-2023.05.07-4.el9.noarch 104/154 2026-04-01T02:23:34.494 INFO:teuthology.orchestra.run.vm03.stdout: Preparing : 1/1 2026-04-01T02:23:34.496 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-cachetools-4.2.4-1.el9.noarch 105/154 2026-04-01T02:23:34.506 INFO:teuthology.orchestra.run.vm03.stdout: Installing : thrift-0.15.0-4.el9.x86_64 1/154 2026-04-01T02:23:34.520 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-more-itertools-8.12.0-2.el9.noarch 2/154 2026-04-01T02:23:34.563 INFO:teuthology.orchestra.run.vm08.stdout: Installing : qatzip-libs-1.3.1-1.el9.x86_64 83/154 2026-04-01T02:23:34.568 INFO:teuthology.orchestra.run.vm08.stdout: Installing : nvme-cli-2.13-1.el9.x86_64 84/154 2026-04-01T02:23:34.721 INFO:teuthology.orchestra.run.vm03.stdout: Installing : lttng-ust-2.12.0-6.el9.x86_64 3/154 2026-04-01T02:23:34.728 INFO:teuthology.orchestra.run.vm03.stdout: Upgrading : librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:34.791 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:34.794 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 5/154 2026-04-01T02:23:34.814 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 5/154 2026-04-01T02:23:34.819 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.cly 6/154 2026-04-01T02:23:34.822 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 7/154 2026-04-01T02:23:34.863 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 7/154 2026-04-01T02:23:34.870 
INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-rados-2:20.2.0-8.g0597158282e.el9.clyso. 8/154 2026-04-01T02:23:34.882 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: nvme-cli-2.13-1.el9.x86_64 84/154 2026-04-01T02:23:34.882 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/default.target.wants/nvmefc-boot-connections.service → /usr/lib/systemd/system/nvmefc-boot-connections.service. 2026-04-01T02:23:34.882 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:34.883 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libxslt-1.1.34-13.el9_6.x86_64 9/154 2026-04-01T02:23:34.889 INFO:teuthology.orchestra.run.vm03.stdout: Installing : librdkafka-1.6.1-102.el9.x86_64 10/154 2026-04-01T02:23:34.893 INFO:teuthology.orchestra.run.vm08.stdout: Installing : mailcap-2.1.49-5.el9.0.2.noarch 85/154 2026-04-01T02:23:34.896 INFO:teuthology.orchestra.run.vm03.stdout: Installing : librabbitmq-0.11.0-7.el9.x86_64 11/154 2026-04-01T02:23:34.897 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libconfig-1.7.2-9.el9.x86_64 86/154 2026-04-01T02:23:34.914 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libpmemobj-1.12.1-1.el9.x86_64 12/154 2026-04-01T02:23:34.918 INFO:teuthology.orchestra.run.vm03.stdout: Installing : lmdb-libs-0.9.29-3.el9.x86_64 13/154 2026-04-01T02:23:34.924 INFO:teuthology.orchestra.run.vm03.stdout: Installing : liboath-2.6.12-1.el9.x86_64 14/154 2026-04-01T02:23:34.927 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:34.927 INFO:teuthology.orchestra.run.vm08.stdout:Creating group 'libstoragemgmt' with GID 992. 2026-04-01T02:23:34.927 INFO:teuthology.orchestra.run.vm08.stdout:Creating user 'libstoragemgmt' (daemon account for libstoragemgmt) with UID 992 and GID 992. 2026-04-01T02:23:34.927 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:34.941 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:34.972 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-google-auth-1:2.45.0-1.el9.noarch 106/154 2026-04-01T02:23:34.974 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:34.974 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/libstoragemgmt.service → /usr/lib/systemd/system/libstoragemgmt.service. 
2026-04-01T02:23:34.974 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:34.991 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-kubernetes-1:26.1.0-3.el9.noarch 107/154 2026-04-01T02:23:34.998 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-backports-tarfile-1.2.0-1.el9.noarch 108/154 2026-04-01T02:23:35.001 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 88/154 2026-04-01T02:23:35.007 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jaraco-context-6.0.1-3.el9.noarch 109/154 2026-04-01T02:23:35.015 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-autocommand-2.2.2-8.el9.noarch 110/154 2026-04-01T02:23:35.022 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jaraco-text-4.0.0-2.el9.noarch 111/154 2026-04-01T02:23:35.032 INFO:teuthology.orchestra.run.vm08.stdout: Installing : fuse-2.9.9-17.el9.x86_64 89/154 2026-04-01T02:23:35.039 INFO:teuthology.orchestra.run.vm08.stdout: Installing : cryptsetup-2.7.2-4.el9.x86_64 90/154 2026-04-01T02:23:35.051 INFO:teuthology.orchestra.run.vm08.stdout: Installing : c-ares-1.19.1-2.el9_4.x86_64 91/154 2026-04-01T02:23:35.056 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-zc-lockfile-2.0-10.el9.noarch 92/154 2026-04-01T02:23:35.068 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-jaraco-collections-3.0.0-8.el9.noarch 112/154 2026-04-01T02:23:35.075 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-xmltodict-0.12.0-15.el9.noarch 93/154 2026-04-01T02:23:35.077 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-cherrypy-18.10.0-5.el9.noarch 113/154 2026-04-01T02:23:35.081 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libnbd-1.20.3-4.el9.x86_64 15/154 2026-04-01T02:23:35.084 INFO:teuthology.orchestra.run.vm03.stdout: Upgrading : librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 16/154 2026-04-01T02:23:35.087 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-websocket-client-1.2.3-2.el9.noarch 94/154 2026-04-01T02:23:35.089 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libunwind-1.6.2-1.el9.x86_64 114/154 2026-04-01T02:23:35.095 INFO:teuthology.orchestra.run.vm06.stdout: Installing : gperftools-libs-2.9.1-3.el9.x86_64 115/154 2026-04-01T02:23:35.102 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 16/154 2026-04-01T02:23:35.104 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libarrow-doc-9.0.0-15.el9.noarch 116/154 2026-04-01T02:23:35.145 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jaraco-8.2.1-3.el9.noarch 17/154 2026-04-01T02:23:35.154 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-lxml-4.6.5-3.el9.x86_64 18/154 2026-04-01T02:23:35.165 INFO:teuthology.orchestra.run.vm03.stdout: Installing : xmlsec1-1.2.29-13.el9.x86_64 19/154 2026-04-01T02:23:35.166 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-typing-extensions-4.15.0-1.el9.noarch 95/154 2026-04-01T02:23:35.167 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 
20/154 2026-04-01T02:23:35.179 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-asyncssh-2.13.2-5.el9.noarch 96/154 2026-04-01T02:23:35.196 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-repoze-lru-0.7-16.el9.noarch 97/154 2026-04-01T02:23:35.199 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 20/154 2026-04-01T02:23:35.201 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 21/154 2026-04-01T02:23:35.210 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-routes-2.5.1-5.el9.noarch 98/154 2026-04-01T02:23:35.221 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-natsort-7.1.1-5.el9.noarch 99/154 2026-04-01T02:23:35.250 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 21/154 2026-04-01T02:23:35.253 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-msgpack-1.0.3-2.el9.x86_64 100/154 2026-04-01T02:23:35.267 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-pyasn1-0.4.8-7.el9_7.noarch 22/154 2026-04-01T02:23:35.275 INFO:teuthology.orchestra.run.vm03.stdout: Installing : protobuf-3.14.0-17.el9_7.x86_64 23/154 2026-04-01T02:23:35.276 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-influxdb-5.3.1-1.el9.noarch 101/154 2026-04-01T02:23:35.280 INFO:teuthology.orchestra.run.vm03.stdout: Installing : lua-5.4.4-4.el9.x86_64 24/154 2026-04-01T02:23:35.287 INFO:teuthology.orchestra.run.vm03.stdout: Installing : flexiblas-3.0.4-8.el9.0.1.x86_64 25/154 2026-04-01T02:23:35.301 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-isodate-0.6.1-3.el9.noarch 102/154 2026-04-01T02:23:35.309 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-saml-1.16.0-1.el9.noarch 103/154 2026-04-01T02:23:35.323 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-certifi-2023.05.07-4.el9.noarch 104/154 2026-04-01T02:23:35.331 INFO:teuthology.orchestra.run.vm03.stdout: Installing : unzip-6.0-59.el9.x86_64 26/154 2026-04-01T02:23:35.350 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-urllib3-1.26.5-6.el9_7.1.noarch 27/154 2026-04-01T02:23:35.358 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-requests-2.25.1-10.el9_6.noarch 28/154 2026-04-01T02:23:35.368 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libquadmath-11.5.0-11.el9.x86_64 29/154 2026-04-01T02:23:35.372 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libgfortran-11.5.0-11.el9.x86_64 30/154 2026-04-01T02:23:35.378 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ledmon-libs-1.1.0-3.el9.x86_64 31/154 2026-04-01T02:23:35.384 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-cachetools-4.2.4-1.el9.noarch 105/154 2026-04-01T02:23:35.421 INFO:teuthology.orchestra.run.vm03.stdout: Installing : re2-1:20211101-20.el9.x86_64 32/154 2026-04-01T02:23:35.466 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libarrow-9.0.0-15.el9.x86_64 33/154 2026-04-01T02:23:35.469 INFO:teuthology.orchestra.run.vm06.stdout: Installing : parquet-libs-9.0.0-15.el9.x86_64 117/154 2026-04-01T02:23:35.475 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 
34/154 2026-04-01T02:23:35.482 INFO:teuthology.orchestra.run.vm06.stdout: Installing : librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 118/154 2026-04-01T02:23:35.493 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-ceph-argparse-2:20.2.0-8.g0597158282e.el 35/154 2026-04-01T02:23:35.512 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso 36/154 2026-04-01T02:23:35.514 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 118/154 2026-04-01T02:23:35.516 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x8 119/154 2026-04-01T02:23:35.526 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-requests-oauthlib-1.3.0-12.el9.noarch 37/154 2026-04-01T02:23:35.574 INFO:teuthology.orchestra.run.vm03.stdout: Installing : zip-3.0-35.el9.x86_64 38/154 2026-04-01T02:23:35.583 INFO:teuthology.orchestra.run.vm03.stdout: Installing : luarocks-3.9.2-5.el9.noarch 39/154 2026-04-01T02:23:35.595 INFO:teuthology.orchestra.run.vm03.stdout: Installing : lua-devel-5.4.4-4.el9.x86_64 40/154 2026-04-01T02:23:35.617 INFO:teuthology.orchestra.run.vm03.stdout: Installing : protobuf-compiler-3.14.0-17.el9_7.x86_64 41/154 2026-04-01T02:23:35.694 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-rsa-4.9-2.el9.noarch 42/154 2026-04-01T02:23:35.700 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 43/154 2026-04-01T02:23:35.709 INFO:teuthology.orchestra.run.vm03.stdout: Installing : xmlsec1-openssl-1.2.29-13.el9.x86_64 44/154 2026-04-01T02:23:35.721 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-xmlsec-1.3.13-1.el9.x86_64 45/154 2026-04-01T02:23:35.785 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jaraco-classes-3.2.1-5.el9.noarch 46/154 2026-04-01T02:23:35.792 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x8 47/154 2026-04-01T02:23:35.850 INFO:teuthology.orchestra.run.vm03.stdout: Installing : xmlstarlet-1.6.1-20.el9.x86_64 48/154 2026-04-01T02:23:35.860 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-google-auth-1:2.45.0-1.el9.noarch 106/154 2026-04-01T02:23:35.895 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-kubernetes-1:26.1.0-3.el9.noarch 107/154 2026-04-01T02:23:35.895 INFO:teuthology.orchestra.run.vm03.stdout: Installing : librados-devel-2:20.2.0-8.g0597158282e.el9.clyso 49/154 2026-04-01T02:23:35.904 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-backports-tarfile-1.2.0-1.el9.noarch 108/154 2026-04-01T02:23:35.907 INFO:teuthology.orchestra.run.vm03.stdout: Installing : socat-1.7.4.1-8.el9.x86_64 50/154 2026-04-01T02:23:35.914 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-toml-0.10.2-6.el9.0.1.noarch 51/154 2026-04-01T02:23:35.918 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-jaraco-context-6.0.1-3.el9.noarch 109/154 2026-04-01T02:23:35.924 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jaraco-functools-3.5.0-2.el9.noarch 52/154 2026-04-01T02:23:35.929 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-tempora-5.0.0-2.el9.noarch 53/154 2026-04-01T02:23:35.930 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-autocommand-2.2.2-8.el9.noarch 110/154 2026-04-01T02:23:35.938 INFO:teuthology.orchestra.run.vm08.stdout: Installing : 
python3-jaraco-text-4.0.0-2.el9.noarch 111/154 2026-04-01T02:23:35.970 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-portend-3.1.0-2.el9.noarch 54/154 2026-04-01T02:23:35.982 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-protobuf-3.14.0-17.el9_7.noarch 55/154 2026-04-01T02:23:35.989 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-jaraco-collections-3.0.0-8.el9.noarch 112/154 2026-04-01T02:23:35.993 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-markupsafe-1.1.1-12.el9.x86_64 56/154 2026-04-01T02:23:35.999 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-cherrypy-18.10.0-5.el9.noarch 113/154 2026-04-01T02:23:36.010 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libunwind-1.6.2-1.el9.x86_64 114/154 2026-04-01T02:23:36.016 INFO:teuthology.orchestra.run.vm08.stdout: Installing : gperftools-libs-2.9.1-3.el9.x86_64 115/154 2026-04-01T02:23:36.025 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libarrow-doc-9.0.0-15.el9.noarch 116/154 2026-04-01T02:23:36.045 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jmespath-1.0.1-1.el9_7.noarch 57/154 2026-04-01T02:23:36.345 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-devel-3.9.23-2.el9.x86_64 58/154 2026-04-01T02:23:36.378 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-babel-2.9.1-2.el9.noarch 59/154 2026-04-01T02:23:36.380 INFO:teuthology.orchestra.run.vm08.stdout: Installing : parquet-libs-9.0.0-15.el9.x86_64 117/154 2026-04-01T02:23:36.382 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jinja2-2.11.3-8.el9_5.noarch 60/154 2026-04-01T02:23:36.412 INFO:teuthology.orchestra.run.vm08.stdout: Installing : librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 118/154 2026-04-01T02:23:36.414 INFO:teuthology.orchestra.run.vm03.stdout: Installing : perl-Benchmark-1.23-481.1.el9_6.noarch 61/154 2026-04-01T02:23:36.438 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 118/154 2026-04-01T02:23:36.441 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x8 119/154 2026-04-01T02:23:36.487 INFO:teuthology.orchestra.run.vm03.stdout: Installing : openblas-0.3.29-1.el9.x86_64 62/154 2026-04-01T02:23:36.491 INFO:teuthology.orchestra.run.vm03.stdout: Installing : openblas-openmp-0.3.29-1.el9.x86_64 63/154 2026-04-01T02:23:36.518 INFO:teuthology.orchestra.run.vm03.stdout: Installing : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 64/154 2026-04-01T02:23:36.963 INFO:teuthology.orchestra.run.vm03.stdout: Installing : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 65/154 2026-04-01T02:23:36.991 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:36.997 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:37.081 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-numpy-1:1.23.5-2.el9_7.x86_64 66/154 2026-04-01T02:23:37.444 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:37.452 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 121/154 2026-04-01T02:23:37.499 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 121/154 2026-04-01T02:23:37.499 
INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph.target → /usr/lib/systemd/system/ceph.target. 2026-04-01T02:23:37.500 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-crash.service → /usr/lib/systemd/system/ceph-crash.service. 2026-04-01T02:23:37.500 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:37.508 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 122/154 2026-04-01T02:23:37.852 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:37.858 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:38.055 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 67/154 2026-04-01T02:23:38.077 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-scipy-1.9.3-2.el9.x86_64 68/154 2026-04-01T02:23:38.091 INFO:teuthology.orchestra.run.vm03.stdout: Installing : boost-program-options-1.75.0-13.el9_7.x86_64 69/154 2026-04-01T02:23:38.094 INFO:teuthology.orchestra.run.vm03.stdout: Installing : smartmontools-1:7.2-9.el9.x86_64 70/154 2026-04-01T02:23:38.116 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: smartmontools-1:7.2-9.el9.x86_64 70/154 2026-04-01T02:23:38.116 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/smartd.service → /usr/lib/systemd/system/smartd.service. 2026-04-01T02:23:38.116 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:38.137 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-pyparsing-2.4.7-9.el9.0.1.noarch 71/154 2026-04-01T02:23:38.148 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-packaging-20.9-5.el9.noarch 72/154 2026-04-01T02:23:38.169 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-ply-3.11-14.el9.0.1.noarch 73/154 2026-04-01T02:23:38.197 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-pycparser-2.20-6.el9.noarch 74/154 2026-04-01T02:23:38.274 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:38.283 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 121/154 2026-04-01T02:23:38.319 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-cffi-1.14.5-5.el9.x86_64 75/154 2026-04-01T02:23:38.335 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-cryptography-36.0.1-5.el9_6.x86_64 76/154 2026-04-01T02:23:38.336 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 121/154 2026-04-01T02:23:38.336 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph.target → /usr/lib/systemd/system/ceph.target. 2026-04-01T02:23:38.336 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-crash.service → /usr/lib/systemd/system/ceph-crash.service. 
2026-04-01T02:23:38.336 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:38.343 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 122/154 2026-04-01T02:23:38.371 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-pyOpenSSL-21.0.0-1.el9.noarch 77/154 2026-04-01T02:23:38.382 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-cheroot-10.0.1-5.el9.noarch 78/154 2026-04-01T02:23:38.391 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-bcrypt-3.2.2-1.el9.x86_64 79/154 2026-04-01T02:23:38.394 INFO:teuthology.orchestra.run.vm03.stdout: Installing : pciutils-3.7.0-7.el9.x86_64 80/154 2026-04-01T02:23:38.434 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: qatlib-24.09.0-1.el9.x86_64 81/154 2026-04-01T02:23:38.439 INFO:teuthology.orchestra.run.vm03.stdout: Installing : qatlib-24.09.0-1.el9.x86_64 81/154 2026-04-01T02:23:38.441 INFO:teuthology.orchestra.run.vm03.stdout: Installing : qatlib-service-24.09.0-1.el9.x86_64 82/154 2026-04-01T02:23:38.466 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: qatlib-service-24.09.0-1.el9.x86_64 82/154 2026-04-01T02:23:38.617 INFO:teuthology.orchestra.run.vm03.stdout: Installing : qatzip-libs-1.3.1-1.el9.x86_64 83/154 2026-04-01T02:23:38.632 INFO:teuthology.orchestra.run.vm03.stdout: Installing : nvme-cli-2.13-1.el9.x86_64 84/154 2026-04-01T02:23:39.007 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: nvme-cli-2.13-1.el9.x86_64 84/154 2026-04-01T02:23:39.007 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/default.target.wants/nvmefc-boot-connections.service → /usr/lib/systemd/system/nvmefc-boot-connections.service. 2026-04-01T02:23:39.007 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:39.012 INFO:teuthology.orchestra.run.vm03.stdout: Installing : mailcap-2.1.49-5.el9.0.2.noarch 85/154 2026-04-01T02:23:39.016 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libconfig-1.7.2-9.el9.x86_64 86/154 2026-04-01T02:23:39.058 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:39.058 INFO:teuthology.orchestra.run.vm03.stdout:Creating group 'libstoragemgmt' with GID 992. 2026-04-01T02:23:39.058 INFO:teuthology.orchestra.run.vm03.stdout:Creating user 'libstoragemgmt' (daemon account for libstoragemgmt) with UID 992 and GID 992. 2026-04-01T02:23:39.058 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:39.078 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:39.123 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 87/154 2026-04-01T02:23:39.123 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/libstoragemgmt.service → /usr/lib/systemd/system/libstoragemgmt.service. 
2026-04-01T02:23:39.123 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:39.157 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 88/154 2026-04-01T02:23:39.188 INFO:teuthology.orchestra.run.vm03.stdout: Installing : fuse-2.9.9-17.el9.x86_64 89/154 2026-04-01T02:23:39.195 INFO:teuthology.orchestra.run.vm03.stdout: Installing : cryptsetup-2.7.2-4.el9.x86_64 90/154 2026-04-01T02:23:39.202 INFO:teuthology.orchestra.run.vm03.stdout: Installing : c-ares-1.19.1-2.el9_4.x86_64 91/154 2026-04-01T02:23:39.208 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-zc-lockfile-2.0-10.el9.noarch 92/154 2026-04-01T02:23:39.227 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-xmltodict-0.12.0-15.el9.noarch 93/154 2026-04-01T02:23:39.237 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-websocket-client-1.2.3-2.el9.noarch 94/154 2026-04-01T02:23:39.315 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-typing-extensions-4.15.0-1.el9.noarch 95/154 2026-04-01T02:23:39.331 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-asyncssh-2.13.2-5.el9.noarch 96/154 2026-04-01T02:23:39.353 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-repoze-lru-0.7-16.el9.noarch 97/154 2026-04-01T02:23:39.371 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-routes-2.5.1-5.el9.noarch 98/154 2026-04-01T02:23:39.385 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-natsort-7.1.1-5.el9.noarch 99/154 2026-04-01T02:23:39.420 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-msgpack-1.0.3-2.el9.x86_64 100/154 2026-04-01T02:23:39.438 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-influxdb-5.3.1-1.el9.noarch 101/154 2026-04-01T02:23:39.468 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-isodate-0.6.1-3.el9.noarch 102/154 2026-04-01T02:23:39.478 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-saml-1.16.0-1.el9.noarch 103/154 2026-04-01T02:23:39.493 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-certifi-2023.05.07-4.el9.noarch 104/154 2026-04-01T02:23:39.549 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-cachetools-4.2.4-1.el9.noarch 105/154 2026-04-01T02:23:40.000 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-google-auth-1:2.45.0-1.el9.noarch 106/154 2026-04-01T02:23:40.061 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-kubernetes-1:26.1.0-3.el9.noarch 107/154 2026-04-01T02:23:40.069 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-backports-tarfile-1.2.0-1.el9.noarch 108/154 2026-04-01T02:23:40.078 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jaraco-context-6.0.1-3.el9.noarch 109/154 2026-04-01T02:23:40.088 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-autocommand-2.2.2-8.el9.noarch 110/154 2026-04-01T02:23:40.094 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jaraco-text-4.0.0-2.el9.noarch 111/154 2026-04-01T02:23:40.135 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-jaraco-collections-3.0.0-8.el9.noarch 112/154 2026-04-01T02:23:40.143 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-cherrypy-18.10.0-5.el9.noarch 113/154 2026-04-01T02:23:40.152 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libunwind-1.6.2-1.el9.x86_64 114/154 2026-04-01T02:23:40.157 INFO:teuthology.orchestra.run.vm03.stdout: Installing : 
gperftools-libs-2.9.1-3.el9.x86_64 115/154 2026-04-01T02:23:40.165 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libarrow-doc-9.0.0-15.el9.noarch 116/154 2026-04-01T02:23:40.518 INFO:teuthology.orchestra.run.vm03.stdout: Installing : parquet-libs-9.0.0-15.el9.x86_64 117/154 2026-04-01T02:23:40.521 INFO:teuthology.orchestra.run.vm03.stdout: Installing : librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 118/154 2026-04-01T02:23:40.547 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 118/154 2026-04-01T02:23:40.549 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x8 119/154 2026-04-01T02:23:41.949 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:41.955 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:42.346 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 120/154 2026-04-01T02:23:42.355 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 121/154 2026-04-01T02:23:42.408 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 121/154 2026-04-01T02:23:42.408 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph.target → /usr/lib/systemd/system/ceph.target. 2026-04-01T02:23:42.408 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-crash.service → /usr/lib/systemd/system/ceph-crash.service. 2026-04-01T02:23:42.408 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:42.415 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 122/154 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 122/154 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout:skipping the directory /sys 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout:skipping the directory /proc 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout:skipping the directory /mnt 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout:skipping the directory /var/tmp 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout:skipping the directory /home 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout:skipping the directory /root 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout:skipping the directory /tmp 2026-04-01T02:23:44.668 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:44.835 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 123/154 2026-04-01T02:23:44.859 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 123/154 2026-04-01T02:23:44.860 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:44.860 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service". 
2026-04-01T02:23:44.860 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 122/154 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout:skipping the directory /sys 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout:skipping the directory /proc 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout:skipping the directory /mnt 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout:skipping the directory /var/tmp 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout:skipping the directory /home 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout:skipping the directory /root 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout:skipping the directory /tmp 2026-04-01T02:23:45.648 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:45.743 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 124/154 2026-04-01T02:23:45.761 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 123/154 2026-04-01T02:23:45.778 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 124/154 2026-04-01T02:23:45.779 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:45.779 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service". 2026-04-01T02:23:45.779 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target. 2026-04-01T02:23:45.779 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target. 2026-04-01T02:23:45.779 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:45.784 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 123/154 2026-04-01T02:23:45.784 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:45.784 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service". 2026-04-01T02:23:45.784 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:45.915 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 125/154 2026-04-01T02:23:45.944 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 125/154 2026-04-01T02:23:45.944 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:45.944 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service". 2026-04-01T02:23:45.944 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target. 2026-04-01T02:23:45.944 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target. 
2026-04-01T02:23:45.944 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:46.228 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 126/154 2026-04-01T02:23:46.259 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 126/154 2026-04-01T02:23:46.259 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:46.259 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service". 2026-04-01T02:23:46.259 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target. 2026-04-01T02:23:46.259 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target. 2026-04-01T02:23:46.259 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:46.295 INFO:teuthology.orchestra.run.vm06.stdout: Installing : grpc-data-1.46.7-10.el9.noarch 127/154 2026-04-01T02:23:46.357 INFO:teuthology.orchestra.run.vm06.stdout: Installing : abseil-cpp-20211102.0-4.el9.x86_64 128/154 2026-04-01T02:23:46.380 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-grpcio-1.46.7-10.el9.x86_64 129/154 2026-04-01T02:23:46.386 INFO:teuthology.orchestra.run.vm06.stdout: Installing : python3-grpcio-tools-1.46.7-10.el9.x86_64 130/154 2026-04-01T02:23:46.470 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 131/154 2026-04-01T02:23:46.476 INFO:teuthology.orchestra.run.vm06.stdout: Installing : cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 131/154 2026-04-01T02:23:46.489 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 132/154 2026-04-01T02:23:46.528 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 133/154 2026-04-01T02:23:46.533 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 134/154 2026-04-01T02:23:46.703 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 124/154 2026-04-01T02:23:46.741 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 124/154 2026-04-01T02:23:46.741 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:46.741 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service". 2026-04-01T02:23:46.741 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target. 2026-04-01T02:23:46.741 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target. 
2026-04-01T02:23:46.741 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:46.868 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 125/154 2026-04-01T02:23:46.893 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 125/154 2026-04-01T02:23:46.893 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:46.893 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service". 2026-04-01T02:23:46.893 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target. 2026-04-01T02:23:46.893 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target. 2026-04-01T02:23:46.893 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:47.187 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 126/154 2026-04-01T02:23:47.217 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 126/154 2026-04-01T02:23:47.217 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:47.217 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service". 2026-04-01T02:23:47.217 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target. 2026-04-01T02:23:47.217 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target. 2026-04-01T02:23:47.217 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:47.255 INFO:teuthology.orchestra.run.vm08.stdout: Installing : grpc-data-1.46.7-10.el9.noarch 127/154 2026-04-01T02:23:47.332 INFO:teuthology.orchestra.run.vm08.stdout: Installing : abseil-cpp-20211102.0-4.el9.x86_64 128/154 2026-04-01T02:23:47.354 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-grpcio-1.46.7-10.el9.x86_64 129/154 2026-04-01T02:23:47.359 INFO:teuthology.orchestra.run.vm08.stdout: Installing : python3-grpcio-tools-1.46.7-10.el9.x86_64 130/154 2026-04-01T02:23:47.442 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 131/154 2026-04-01T02:23:47.447 INFO:teuthology.orchestra.run.vm08.stdout: Installing : cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 131/154 2026-04-01T02:23:47.458 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 132/154 2026-04-01T02:23:47.494 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 
133/154 2026-04-01T02:23:47.498 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 134/154 2026-04-01T02:23:48.720 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 134/154 2026-04-01T02:23:48.819 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 135/154 2026-04-01T02:23:49.457 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 135/154 2026-04-01T02:23:49.460 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 136/154 2026-04-01T02:23:49.482 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 136/154 2026-04-01T02:23:49.485 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 137/154 2026-04-01T02:23:49.573 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 137/154 2026-04-01T02:23:49.606 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 134/154 2026-04-01T02:23:49.622 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 135/154 2026-04-01T02:23:49.629 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 138/154 2026-04-01T02:23:49.633 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 139/154 2026-04-01T02:23:49.663 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 139/154 2026-04-01T02:23:49.663 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:49.663 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service". 2026-04-01T02:23:49.663 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target. 2026-04-01T02:23:49.663 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target. 2026-04-01T02:23:49.663 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:49.680 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 140/154 2026-04-01T02:23:49.697 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 
140/154 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 122/154 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout:skipping the directory /sys 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout:skipping the directory /proc 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout:skipping the directory /mnt 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout:skipping the directory /var/tmp 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout:skipping the directory /home 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout:skipping the directory /root 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout:skipping the directory /tmp 2026-04-01T02:23:49.724 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:49.822 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 123/154 2026-04-01T02:23:49.844 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 123/154 2026-04-01T02:23:49.844 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:49.844 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service". 2026-04-01T02:23:49.844 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:50.248 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 135/154 2026-04-01T02:23:50.252 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 136/154 2026-04-01T02:23:50.271 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 136/154 2026-04-01T02:23:50.272 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 137/154 2026-04-01T02:23:50.359 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 137/154 2026-04-01T02:23:50.418 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 138/154 2026-04-01T02:23:50.421 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 139/154 2026-04-01T02:23:50.448 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 139/154 2026-04-01T02:23:50.448 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:50.448 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service". 2026-04-01T02:23:50.448 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target. 2026-04-01T02:23:50.448 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target. 2026-04-01T02:23:50.448 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:50.465 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 
140/154 2026-04-01T02:23:50.479 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 140/154 2026-04-01T02:23:50.722 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 124/154 2026-04-01T02:23:50.756 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 124/154 2026-04-01T02:23:50.756 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:50.756 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service". 2026-04-01T02:23:50.756 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target. 2026-04-01T02:23:50.756 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target. 2026-04-01T02:23:50.756 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:50.873 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 125/154 2026-04-01T02:23:50.902 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 125/154 2026-04-01T02:23:50.903 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:50.903 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service". 2026-04-01T02:23:50.903 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target. 2026-04-01T02:23:50.903 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target. 2026-04-01T02:23:50.903 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:51.019 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 141/154 2026-04-01T02:23:51.023 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 142/154 2026-04-01T02:23:51.053 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 142/154 2026-04-01T02:23:51.053 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:51.053 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 2026-04-01T02:23:51.053 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target. 2026-04-01T02:23:51.053 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target. 
2026-04-01T02:23:51.053 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:51.066 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-immutable-object-cache-2:20.2.0-8.g05971582 143/154 2026-04-01T02:23:51.094 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-8.g05971582 143/154 2026-04-01T02:23:51.094 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:51.094 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-04-01T02:23:51.094 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:51.180 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 126/154 2026-04-01T02:23:51.212 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 126/154 2026-04-01T02:23:51.212 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:51.212 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service". 2026-04-01T02:23:51.212 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target. 2026-04-01T02:23:51.212 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target. 2026-04-01T02:23:51.212 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:51.246 INFO:teuthology.orchestra.run.vm06.stdout: Installing : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 144/154 2026-04-01T02:23:51.260 INFO:teuthology.orchestra.run.vm03.stdout: Installing : grpc-data-1.46.7-10.el9.noarch 127/154 2026-04-01T02:23:51.278 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 144/154 2026-04-01T02:23:51.278 INFO:teuthology.orchestra.run.vm06.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:51.278 INFO:teuthology.orchestra.run.vm06.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service". 2026-04-01T02:23:51.278 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 2026-04-01T02:23:51.278 INFO:teuthology.orchestra.run.vm06.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 
2026-04-01T02:23:51.278 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:51.323 INFO:teuthology.orchestra.run.vm03.stdout: Installing : abseil-cpp-20211102.0-4.el9.x86_64 128/154 2026-04-01T02:23:51.341 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-grpcio-1.46.7-10.el9.x86_64 129/154 2026-04-01T02:23:51.346 INFO:teuthology.orchestra.run.vm03.stdout: Installing : python3-grpcio-tools-1.46.7-10.el9.x86_64 130/154 2026-04-01T02:23:51.429 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 131/154 2026-04-01T02:23:51.436 INFO:teuthology.orchestra.run.vm03.stdout: Installing : cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 131/154 2026-04-01T02:23:51.448 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 132/154 2026-04-01T02:23:51.481 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 133/154 2026-04-01T02:23:51.491 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 134/154 2026-04-01T02:23:51.810 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 141/154 2026-04-01T02:23:51.814 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 142/154 2026-04-01T02:23:51.842 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 142/154 2026-04-01T02:23:51.842 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:51.842 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 2026-04-01T02:23:51.842 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target. 2026-04-01T02:23:51.842 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target. 2026-04-01T02:23:51.842 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:51.860 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-immutable-object-cache-2:20.2.0-8.g05971582 143/154 2026-04-01T02:23:51.891 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-8.g05971582 143/154 2026-04-01T02:23:51.891 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:51.891 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-04-01T02:23:51.891 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:52.058 INFO:teuthology.orchestra.run.vm08.stdout: Installing : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 144/154 2026-04-01T02:23:52.085 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 144/154 2026-04-01T02:23:52.085 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:52.085 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service". 
2026-04-01T02:23:52.085 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 2026-04-01T02:23:52.085 INFO:teuthology.orchestra.run.vm08.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 2026-04-01T02:23:52.085 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:53.611 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 134/154 2026-04-01T02:23:53.629 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 135/154 2026-04-01T02:23:54.242 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 135/154 2026-04-01T02:23:54.246 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 136/154 2026-04-01T02:23:54.267 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 136/154 2026-04-01T02:23:54.268 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 137/154 2026-04-01T02:23:54.348 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 137/154 2026-04-01T02:23:54.411 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 138/154 2026-04-01T02:23:54.414 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 139/154 2026-04-01T02:23:54.446 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 139/154 2026-04-01T02:23:54.446 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:54.446 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service". 2026-04-01T02:23:54.446 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target. 2026-04-01T02:23:54.446 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target. 2026-04-01T02:23:54.446 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:54.463 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 140/154 2026-04-01T02:23:54.479 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 140/154 2026-04-01T02:23:55.807 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 141/154 2026-04-01T02:23:55.861 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 142/154 2026-04-01T02:23:55.890 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 142/154 2026-04-01T02:23:55.898 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:55.898 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 
2026-04-01T02:23:55.898 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target. 2026-04-01T02:23:55.898 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target. 2026-04-01T02:23:55.898 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:55.931 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-immutable-object-cache-2:20.2.0-8.g05971582 143/154 2026-04-01T02:23:55.961 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-8.g05971582 143/154 2026-04-01T02:23:55.961 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:55.961 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-04-01T02:23:55.961 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:56.128 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 145/154 2026-04-01T02:23:56.140 INFO:teuthology.orchestra.run.vm03.stdout: Installing : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 144/154 2026-04-01T02:23:56.169 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 144/154 2026-04-01T02:23:56.169 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:23:56.169 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service". 2026-04-01T02:23:56.169 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 2026-04-01T02:23:56.169 INFO:teuthology.orchestra.run.vm03.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 
2026-04-01T02:23:56.169 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:23:56.179 INFO:teuthology.orchestra.run.vm06.stdout: Installing : ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 146/154 2026-04-01T02:23:56.202 INFO:teuthology.orchestra.run.vm06.stdout: Installing : perl-Test-Harness-1:3.42-461.el9.noarch 147/154 2026-04-01T02:23:56.216 INFO:teuthology.orchestra.run.vm06.stdout: Installing : libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clys 148/154 2026-04-01T02:23:56.231 INFO:teuthology.orchestra.run.vm06.stdout: Installing : rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 149/154 2026-04-01T02:23:56.276 INFO:teuthology.orchestra.run.vm06.stdout: Installing : rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 150/154 2026-04-01T02:23:56.300 INFO:teuthology.orchestra.run.vm06.stdout: Installing : bzip2-1.0.8-10.el9_5.x86_64 151/154 2026-04-01T02:23:56.327 INFO:teuthology.orchestra.run.vm06.stdout: Installing : s3cmd-2.4.0-1.el9.noarch 152/154 2026-04-01T02:23:56.327 INFO:teuthology.orchestra.run.vm06.stdout: Cleanup : librbd1-2:16.2.4-5.el9.x86_64 153/154 2026-04-01T02:23:56.353 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: librbd1-2:16.2.4-5.el9.x86_64 153/154 2026-04-01T02:23:56.353 INFO:teuthology.orchestra.run.vm06.stdout: Cleanup : librados2-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:23:56.958 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 145/154 2026-04-01T02:23:57.004 INFO:teuthology.orchestra.run.vm08.stdout: Installing : ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 146/154 2026-04-01T02:23:57.015 INFO:teuthology.orchestra.run.vm08.stdout: Installing : perl-Test-Harness-1:3.42-461.el9.noarch 147/154 2026-04-01T02:23:57.025 INFO:teuthology.orchestra.run.vm08.stdout: Installing : libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clys 148/154 2026-04-01T02:23:57.041 INFO:teuthology.orchestra.run.vm08.stdout: Installing : rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 149/154 2026-04-01T02:23:57.050 INFO:teuthology.orchestra.run.vm08.stdout: Installing : rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 150/154 2026-04-01T02:23:57.073 INFO:teuthology.orchestra.run.vm08.stdout: Installing : bzip2-1.0.8-10.el9_5.x86_64 151/154 2026-04-01T02:23:57.078 INFO:teuthology.orchestra.run.vm08.stdout: Installing : s3cmd-2.4.0-1.el9.noarch 152/154 2026-04-01T02:23:57.079 INFO:teuthology.orchestra.run.vm08.stdout: Cleanup : librbd1-2:16.2.4-5.el9.x86_64 153/154 2026-04-01T02:23:57.105 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: librbd1-2:16.2.4-5.el9.x86_64 153/154 2026-04-01T02:23:57.105 INFO:teuthology.orchestra.run.vm08.stdout: Cleanup : librados2-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:23:58.284 INFO:teuthology.orchestra.run.vm06.stdout: Running scriptlet: librados2-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 2/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 3/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-8.g05971582 5/154 2026-04-01T02:23:58.285 
INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 6/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 7/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 8/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 9/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 10/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 11/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 12/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.cly 13/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clys 14/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 15/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 16/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 17/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librados-devel-2:20.2.0-8.g0597158282e.el9.clyso 18/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 19/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 20/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-ceph-argparse-2:20.2.0-8.g0597158282e.el 21/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 22/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso 23/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-rados-2:20.2.0-8.g0597158282e.el9.clyso. 24/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x8 25/154 2026-04-01T02:23:58.285 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x8 26/154 2026-04-01T02:23:58.287 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 27/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 28/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 29/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 
30/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 31/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 32/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 33/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 34/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 35/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 36/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 37/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 38/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 39/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 40/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 41/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 42/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 43/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 44/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 45/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 46/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 47/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 48/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 49/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 50/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 51/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 52/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 53/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 54/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 55/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 56/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 57/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 58/154 2026-04-01T02:23:58.288 
INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 59/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 60/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 61/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 62/154 2026-04-01T02:23:58.288 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 63/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 64/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 65/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 66/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 67/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 68/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 69/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 70/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 71/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 72/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 73/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 74/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 75/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 76/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 77/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 78/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 79/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 80/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 81/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-xmltodict-0.12.0-15.el9.noarch 82/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 83/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : re2-1:20211101-20.el9.x86_64 84/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : s3cmd-2.4.0-1.el9.noarch 85/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 86/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : bzip2-1.0.8-10.el9_5.x86_64 87/154 
2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : c-ares-1.19.1-2.el9_4.x86_64 88/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : cryptsetup-2.7.2-4.el9.x86_64 89/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 90/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 91/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 92/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libgfortran-11.5.0-11.el9.x86_64 93/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libquadmath-11.5.0-11.el9.x86_64 94/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : lmdb-libs-0.9.29-3.el9.x86_64 95/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : mailcap-2.1.49-5.el9.0.2.noarch 96/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : nvme-cli-2.13-1.el9.x86_64 97/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 98/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 99/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-cryptography-36.0.1-5.el9_6.x86_64 100/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-ply-3.11-14.el9.0.1.noarch 101/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 102/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-pyparsing-2.4.7-9.el9.0.1.noarch 103/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-requests-2.25.1-10.el9_6.noarch 104/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-urllib3-1.26.5-6.el9_7.1.noarch 105/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : smartmontools-1:7.2-9.el9.x86_64 106/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : unzip-6.0-59.el9.x86_64 107/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : zip-3.0-35.el9.x86_64 108/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : boost-program-options-1.75.0-13.el9_7.x86_64 109/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : flexiblas-3.0.4-8.el9.0.1.x86_64 110/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 111/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 112/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 113/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 114/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 115/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 116/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : 
libstoragemgmt-1.10.1-1.el9.x86_64 117/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : libxslt-1.1.34-13.el9_6.x86_64 118/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 119/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : lua-5.4.4-4.el9.x86_64 120/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 121/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 122/154 2026-04-01T02:23:58.289 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : perl-Benchmark-1.23-481.1.el9_6.noarch 123/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : perl-Test-Harness-1:3.42-461.el9.noarch 124/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : protobuf-3.14.0-17.el9_7.x86_64 125/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 126/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-devel-3.9.23-2.el9.x86_64 127/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jinja2-2.11.3-8.el9_5.noarch 128/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-jmespath-1.0.1-1.el9_7.noarch 129/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 130/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 131/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 132/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-numpy-1:1.23.5-2.el9_7.x86_64 133/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 134/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 135/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-protobuf-3.14.0-17.el9_7.noarch 136/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-pyasn1-0.4.8-7.el9_7.noarch 137/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 138/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 139/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 140/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : python3-toml-0.10.2-6.el9.0.1.noarch 141/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : qatlib-24.09.0-1.el9.x86_64 142/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : qatlib-service-24.09.0-1.el9.x86_64 143/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 144/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 145/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : 
xmlsec1-1.2.29-13.el9.x86_64 146/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 147/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 148/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 149/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : protobuf-compiler-3.14.0-17.el9_7.x86_64 150/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 151/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librados2-2:16.2.4-5.el9.x86_64 152/154 2026-04-01T02:23:58.290 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 153/154 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: Verifying : librbd1-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout:Upgraded: 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout:Installed: 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: abseil-cpp-20211102.0-4.el9.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: boost-program-options-1.75.0-13.el9_7.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: bzip2-1.0.8-10.el9_5.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: c-ares-1.19.1-2.el9_4.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-grafana-dashboards-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-immutable-object-cache-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-diskprediction-local-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 
INFO:teuthology.orchestra.run.vm06.stdout: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: cryptsetup-2.7.2-4.el9.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: flexiblas-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: fuse-2.9.9-17.el9.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: gperftools-libs-2.9.1-3.el9.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: grpc-data-1.46.7-10.el9.noarch 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: ledmon-libs-1.1.0-3.el9.x86_64 2026-04-01T02:23:58.494 INFO:teuthology.orchestra.run.vm06.stdout: libarrow-9.0.0-15.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libarrow-doc-9.0.0-15.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libconfig-1.7.2-9.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libgfortran-11.5.0-11.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libnbd-1.20.3-4.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: liboath-2.6.12-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libpmemobj-1.12.1-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libquadmath-11.5.0-11.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: librabbitmq-0.11.0-7.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: librados-devel-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: 
libradosstriper1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: librdkafka-1.6.1-102.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libstoragemgmt-1.10.1-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libunwind-1.6.2-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: libxslt-1.1.34-13.el9_6.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: lmdb-libs-0.9.29-3.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: lttng-ust-2.12.0-6.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: lua-5.4.4-4.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: lua-devel-5.4.4-4.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: luarocks-3.9.2-5.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: mailcap-2.1.49-5.el9.0.2.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: nvme-cli-2.13-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: openblas-0.3.29-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: openblas-openmp-0.3.29-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: parquet-libs-9.0.0-15.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: pciutils-3.7.0-7.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: perl-Benchmark-1.23-481.1.el9_6.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: perl-Test-Harness-1:3.42-461.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: protobuf-3.14.0-17.el9_7.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: protobuf-compiler-3.14.0-17.el9_7.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-asyncssh-2.13.2-5.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-autocommand-2.2.2-8.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-babel-2.9.1-2.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-bcrypt-3.2.2-1.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-cachetools-4.2.4-1.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-ceph-argparse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-certifi-2023.05.07-4.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-cffi-1.14.5-5.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-cheroot-10.0.1-5.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-cherrypy-18.10.0-5.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: 
python3-cryptography-36.0.1-5.el9_6.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-devel-3.9.23-2.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-google-auth-1:2.45.0-1.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-grpcio-1.46.7-10.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-influxdb-5.3.1-1.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-isodate-0.6.1-3.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-8.2.1-3.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-context-6.0.1-3.el9.noarch 2026-04-01T02:23:58.495 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-jaraco-text-4.0.0-2.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-jinja2-2.11.3-8.el9_5.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-jmespath-1.0.1-1.el9_7.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-lxml-4.6.5-3.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-markupsafe-1.1.1-12.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-more-itertools-8.12.0-2.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-msgpack-1.0.3-2.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-natsort-7.1.1-5.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-numpy-1:1.23.5-2.el9_7.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-packaging-20.9-5.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-ply-3.11-14.el9.0.1.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-portend-3.1.0-2.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-protobuf-3.14.0-17.el9_7.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyasn1-0.4.8-7.el9_7.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyasn1-modules-0.4.8-7.el9_7.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-pycparser-2.20-6.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-pyparsing-2.4.7-9.el9.0.1.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-rados-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 
2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-repoze-lru-0.7-16.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-requests-2.25.1-10.el9_6.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-routes-2.5.1-5.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-rsa-4.9-2.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-saml-1.16.0-1.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-scipy-1.9.3-2.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-tempora-5.0.0-2.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-toml-0.10.2-6.el9.0.1.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-typing-extensions-4.15.0-1.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-urllib3-1.26.5-6.el9_7.1.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-websocket-client-1.2.3-2.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-xmlsec-1.3.13-1.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-xmltodict-0.12.0-15.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: python3-zc-lockfile-2.0-10.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: qatlib-24.09.0-1.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: qatlib-service-24.09.0-1.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: qatzip-libs-1.3.1-1.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: re2-1:20211101-20.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: s3cmd-2.4.0-1.el9.noarch 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: smartmontools-1:7.2-9.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: socat-1.7.4.1-8.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: thrift-0.15.0-4.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: unzip-6.0-59.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: xmlsec1-1.2.29-13.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64 2026-04-01T02:23:58.496 INFO:teuthology.orchestra.run.vm06.stdout: xmlstarlet-1.6.1-20.el9.x86_64 2026-04-01T02:23:58.497 INFO:teuthology.orchestra.run.vm06.stdout: zip-3.0-35.el9.x86_64 2026-04-01T02:23:58.497 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:23:58.497 
INFO:teuthology.orchestra.run.vm06.stdout:Complete! 2026-04-01T02:23:58.915 DEBUG:teuthology.parallel:result is None 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: librados2-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 2/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 3/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-8.g05971582 5/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 6/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 7/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 8/154 2026-04-01T02:23:59.028 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 9/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 10/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 11/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 12/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.cly 13/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clys 14/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 15/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 16/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 17/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librados-devel-2:20.2.0-8.g0597158282e.el9.clyso 18/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 19/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 20/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-ceph-argparse-2:20.2.0-8.g0597158282e.el 21/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 22/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso 23/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-rados-2:20.2.0-8.g0597158282e.el9.clyso. 
24/154 2026-04-01T02:23:59.029 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x8 25/154 2026-04-01T02:23:59.030 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x8 26/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 27/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 28/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 29/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 30/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 31/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 32/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 33/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 34/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 35/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 36/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 37/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 38/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 39/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 40/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 41/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 42/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 43/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 44/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 45/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 46/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 47/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 48/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 49/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 50/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 51/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : 
python3-bcrypt-3.2.2-1.el9.x86_64 52/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 53/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 54/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 55/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 56/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 57/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 58/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 59/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 60/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 61/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 62/154 2026-04-01T02:23:59.031 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 63/154 2026-04-01T02:23:59.034 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 64/154 2026-04-01T02:23:59.034 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 65/154 2026-04-01T02:23:59.034 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 66/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 67/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 68/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 69/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 70/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 71/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 72/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 73/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 74/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 75/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 76/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 77/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 78/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 79/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 80/154 2026-04-01T02:23:59.035 
INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 81/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-xmltodict-0.12.0-15.el9.noarch 82/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 83/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : re2-1:20211101-20.el9.x86_64 84/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : s3cmd-2.4.0-1.el9.noarch 85/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 86/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : bzip2-1.0.8-10.el9_5.x86_64 87/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : c-ares-1.19.1-2.el9_4.x86_64 88/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : cryptsetup-2.7.2-4.el9.x86_64 89/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 90/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 91/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 92/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libgfortran-11.5.0-11.el9.x86_64 93/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libquadmath-11.5.0-11.el9.x86_64 94/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : lmdb-libs-0.9.29-3.el9.x86_64 95/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : mailcap-2.1.49-5.el9.0.2.noarch 96/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : nvme-cli-2.13-1.el9.x86_64 97/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 98/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 99/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-cryptography-36.0.1-5.el9_6.x86_64 100/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-ply-3.11-14.el9.0.1.noarch 101/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 102/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-pyparsing-2.4.7-9.el9.0.1.noarch 103/154 2026-04-01T02:23:59.035 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-requests-2.25.1-10.el9_6.noarch 104/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-urllib3-1.26.5-6.el9_7.1.noarch 105/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : smartmontools-1:7.2-9.el9.x86_64 106/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : unzip-6.0-59.el9.x86_64 107/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : zip-3.0-35.el9.x86_64 108/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : boost-program-options-1.75.0-13.el9_7.x86_64 109/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : flexiblas-3.0.4-8.el9.0.1.x86_64 110/154 
2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 111/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 112/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 113/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 114/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 115/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 116/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libstoragemgmt-1.10.1-1.el9.x86_64 117/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : libxslt-1.1.34-13.el9_6.x86_64 118/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 119/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : lua-5.4.4-4.el9.x86_64 120/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 121/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 122/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : perl-Benchmark-1.23-481.1.el9_6.noarch 123/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : perl-Test-Harness-1:3.42-461.el9.noarch 124/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : protobuf-3.14.0-17.el9_7.x86_64 125/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 126/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-devel-3.9.23-2.el9.x86_64 127/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jinja2-2.11.3-8.el9_5.noarch 128/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-jmespath-1.0.1-1.el9_7.noarch 129/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 130/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 131/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 132/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-numpy-1:1.23.5-2.el9_7.x86_64 133/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 134/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 135/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-protobuf-3.14.0-17.el9_7.noarch 136/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-pyasn1-0.4.8-7.el9_7.noarch 137/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 138/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : 
python3-requests-oauthlib-1.3.0-12.el9.noarch 139/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 140/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : python3-toml-0.10.2-6.el9.0.1.noarch 141/154 2026-04-01T02:23:59.036 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : qatlib-24.09.0-1.el9.x86_64 142/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : qatlib-service-24.09.0-1.el9.x86_64 143/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 144/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 145/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : xmlsec1-1.2.29-13.el9.x86_64 146/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 147/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 148/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 149/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : protobuf-compiler-3.14.0-17.el9_7.x86_64 150/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 151/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librados2-2:16.2.4-5.el9.x86_64 152/154 2026-04-01T02:23:59.037 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 153/154 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : librbd1-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout:Upgraded: 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout: librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout: librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout:Installed: 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout: abseil-cpp-20211102.0-4.el9.x86_64 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout: boost-program-options-1.75.0-13.el9_7.x86_64 2026-04-01T02:23:59.149 INFO:teuthology.orchestra.run.vm08.stdout: bzip2-1.0.8-10.el9_5.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: c-ares-1.19.1-2.el9_4.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-grafana-dashboards-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-immutable-object-cache-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: 
ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-diskprediction-local-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: cryptsetup-2.7.2-4.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: fuse-2.9.9-17.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: gperftools-libs-2.9.1-3.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: grpc-data-1.46.7-10.el9.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: ledmon-libs-1.1.0-3.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libarrow-9.0.0-15.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libarrow-doc-9.0.0-15.el9.noarch 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 
INFO:teuthology.orchestra.run.vm08.stdout: libconfig-1.7.2-9.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libgfortran-11.5.0-11.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libnbd-1.20.3-4.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: liboath-2.6.12-1.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libpmemobj-1.12.1-1.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libquadmath-11.5.0-11.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: librabbitmq-0.11.0-7.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: librados-devel-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libradosstriper1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: librdkafka-1.6.1-102.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libstoragemgmt-1.10.1-1.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libunwind-1.6.2-1.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: libxslt-1.1.34-13.el9_6.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: lmdb-libs-0.9.29-3.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: lttng-ust-2.12.0-6.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: lua-5.4.4-4.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: lua-devel-5.4.4-4.el9.x86_64 2026-04-01T02:23:59.150 INFO:teuthology.orchestra.run.vm08.stdout: luarocks-3.9.2-5.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: mailcap-2.1.49-5.el9.0.2.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: nvme-cli-2.13-1.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: openblas-0.3.29-1.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: openblas-openmp-0.3.29-1.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: parquet-libs-9.0.0-15.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: pciutils-3.7.0-7.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: perl-Benchmark-1.23-481.1.el9_6.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: perl-Test-Harness-1:3.42-461.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: protobuf-3.14.0-17.el9_7.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: protobuf-compiler-3.14.0-17.el9_7.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-asyncssh-2.13.2-5.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-autocommand-2.2.2-8.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-babel-2.9.1-2.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-bcrypt-3.2.2-1.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-cachetools-4.2.4-1.el9.noarch 2026-04-01T02:23:59.151 
INFO:teuthology.orchestra.run.vm08.stdout: python3-ceph-argparse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-certifi-2023.05.07-4.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-cffi-1.14.5-5.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-cheroot-10.0.1-5.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-cherrypy-18.10.0-5.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-cryptography-36.0.1-5.el9_6.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-devel-3.9.23-2.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-google-auth-1:2.45.0-1.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-grpcio-1.46.7-10.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-influxdb-5.3.1-1.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-isodate-0.6.1-3.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-8.2.1-3.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-context-6.0.1-3.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-text-4.0.0-2.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jinja2-2.11.3-8.el9_5.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-jmespath-1.0.1-1.el9_7.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-lxml-4.6.5-3.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-markupsafe-1.1.1-12.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-more-itertools-8.12.0-2.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-msgpack-1.0.3-2.el9.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-natsort-7.1.1-5.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-numpy-1:1.23.5-2.el9_7.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-packaging-20.9-5.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-ply-3.11-14.el9.0.1.noarch 2026-04-01T02:23:59.151 
INFO:teuthology.orchestra.run.vm08.stdout: python3-portend-3.1.0-2.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-protobuf-3.14.0-17.el9_7.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyasn1-0.4.8-7.el9_7.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyasn1-modules-0.4.8-7.el9_7.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-pycparser-2.20-6.el9.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyparsing-2.4.7-9.el9.0.1.noarch 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-rados-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.151 INFO:teuthology.orchestra.run.vm08.stdout: python3-repoze-lru-0.7-16.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-requests-2.25.1-10.el9_6.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-routes-2.5.1-5.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-rsa-4.9-2.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-saml-1.16.0-1.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-scipy-1.9.3-2.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-tempora-5.0.0-2.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-toml-0.10.2-6.el9.0.1.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-typing-extensions-4.15.0-1.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-urllib3-1.26.5-6.el9_7.1.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-websocket-client-1.2.3-2.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-xmlsec-1.3.13-1.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-xmltodict-0.12.0-15.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: python3-zc-lockfile-2.0-10.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: qatlib-24.09.0-1.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: qatlib-service-24.09.0-1.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: qatzip-libs-1.3.1-1.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: re2-1:20211101-20.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: s3cmd-2.4.0-1.el9.noarch 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: 
smartmontools-1:7.2-9.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: socat-1.7.4.1-8.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: thrift-0.15.0-4.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: unzip-6.0-59.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: xmlsec1-1.2.29-13.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: xmlstarlet-1.6.1-20.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: zip-3.0-35.el9.x86_64 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:23:59.152 INFO:teuthology.orchestra.run.vm08.stdout:Complete! 2026-04-01T02:23:59.254 DEBUG:teuthology.parallel:result is None 2026-04-01T02:24:00.968 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 145/154 2026-04-01T02:24:01.003 INFO:teuthology.orchestra.run.vm03.stdout: Installing : ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 146/154 2026-04-01T02:24:01.012 INFO:teuthology.orchestra.run.vm03.stdout: Installing : perl-Test-Harness-1:3.42-461.el9.noarch 147/154 2026-04-01T02:24:01.018 INFO:teuthology.orchestra.run.vm03.stdout: Installing : libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clys 148/154 2026-04-01T02:24:01.031 INFO:teuthology.orchestra.run.vm03.stdout: Installing : rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 149/154 2026-04-01T02:24:01.040 INFO:teuthology.orchestra.run.vm03.stdout: Installing : rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 150/154 2026-04-01T02:24:01.065 INFO:teuthology.orchestra.run.vm03.stdout: Installing : bzip2-1.0.8-10.el9_5.x86_64 151/154 2026-04-01T02:24:01.070 INFO:teuthology.orchestra.run.vm03.stdout: Installing : s3cmd-2.4.0-1.el9.noarch 152/154 2026-04-01T02:24:01.070 INFO:teuthology.orchestra.run.vm03.stdout: Cleanup : librbd1-2:16.2.4-5.el9.x86_64 153/154 2026-04-01T02:24:01.092 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: librbd1-2:16.2.4-5.el9.x86_64 153/154 2026-04-01T02:24:01.092 INFO:teuthology.orchestra.run.vm03.stdout: Cleanup : librados2-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: librados2-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 2/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 3/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 4/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-8.g05971582 5/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 6/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 7/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 8/154 2026-04-01T02:24:02.706 
INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 9/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x 10/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 11/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 12/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.cly 13/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clys 14/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.cly 15/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86 16/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso. 17/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librados-devel-2:20.2.0-8.g0597158282e.el9.clyso 18/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libradosstriper1-2:20.2.0-8.g0597158282e.el9.cly 19/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 20/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-ceph-argparse-2:20.2.0-8.g0597158282e.el 21/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 22/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso 23/154 2026-04-01T02:24:02.706 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-rados-2:20.2.0-8.g0597158282e.el9.clyso. 24/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x8 25/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x8 26/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 27/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 28/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 29/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 
30/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 31/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 32/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 33/154 2026-04-01T02:24:02.707 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 34/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 35/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 36/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 37/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 38/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 39/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 40/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 41/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 42/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 43/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 44/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 45/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 46/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 47/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 48/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 49/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 50/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 51/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 52/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 53/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 54/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 55/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 56/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 57/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 58/154 2026-04-01T02:24:02.709 
INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 59/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 60/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 61/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 62/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 63/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 64/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 65/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 66/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 67/154 2026-04-01T02:24:02.709 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 68/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 69/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 70/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 71/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 72/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 73/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 74/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 75/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 76/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 77/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 78/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 79/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 80/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 81/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-xmltodict-0.12.0-15.el9.noarch 82/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 83/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : re2-1:20211101-20.el9.x86_64 84/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : s3cmd-2.4.0-1.el9.noarch 85/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 86/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : bzip2-1.0.8-10.el9_5.x86_64 87/154 
2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : c-ares-1.19.1-2.el9_4.x86_64 88/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : cryptsetup-2.7.2-4.el9.x86_64 89/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 90/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 91/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 92/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libgfortran-11.5.0-11.el9.x86_64 93/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libquadmath-11.5.0-11.el9.x86_64 94/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : lmdb-libs-0.9.29-3.el9.x86_64 95/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : mailcap-2.1.49-5.el9.0.2.noarch 96/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : nvme-cli-2.13-1.el9.x86_64 97/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 98/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 99/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-cryptography-36.0.1-5.el9_6.x86_64 100/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-ply-3.11-14.el9.0.1.noarch 101/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 102/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-pyparsing-2.4.7-9.el9.0.1.noarch 103/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-requests-2.25.1-10.el9_6.noarch 104/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-urllib3-1.26.5-6.el9_7.1.noarch 105/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : smartmontools-1:7.2-9.el9.x86_64 106/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : unzip-6.0-59.el9.x86_64 107/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : zip-3.0-35.el9.x86_64 108/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : boost-program-options-1.75.0-13.el9_7.x86_64 109/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : flexiblas-3.0.4-8.el9.0.1.x86_64 110/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 111/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 112/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 113/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 114/154 2026-04-01T02:24:02.710 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 115/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 116/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : 
libstoragemgmt-1.10.1-1.el9.x86_64 117/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : libxslt-1.1.34-13.el9_6.x86_64 118/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 119/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : lua-5.4.4-4.el9.x86_64 120/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 121/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 122/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : perl-Benchmark-1.23-481.1.el9_6.noarch 123/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : perl-Test-Harness-1:3.42-461.el9.noarch 124/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : protobuf-3.14.0-17.el9_7.x86_64 125/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 126/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-devel-3.9.23-2.el9.x86_64 127/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jinja2-2.11.3-8.el9_5.noarch 128/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-jmespath-1.0.1-1.el9_7.noarch 129/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 130/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 131/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 132/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-numpy-1:1.23.5-2.el9_7.x86_64 133/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 134/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 135/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-protobuf-3.14.0-17.el9_7.noarch 136/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-pyasn1-0.4.8-7.el9_7.noarch 137/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 138/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 139/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 140/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : python3-toml-0.10.2-6.el9.0.1.noarch 141/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : qatlib-24.09.0-1.el9.x86_64 142/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : qatlib-service-24.09.0-1.el9.x86_64 143/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 144/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 145/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : 
xmlsec1-1.2.29-13.el9.x86_64 146/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 147/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 148/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 149/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : protobuf-compiler-3.14.0-17.el9_7.x86_64 150/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 151/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librados2-2:16.2.4-5.el9.x86_64 152/154 2026-04-01T02:24:02.711 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 153/154 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : librbd1-2:16.2.4-5.el9.x86_64 154/154 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout:Upgraded: 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout: librados2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout: librbd1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout:Installed: 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout: abseil-cpp-20211102.0-4.el9.x86_64 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout: boost-program-options-1.75.0-13.el9_7.x86_64 2026-04-01T02:24:02.805 INFO:teuthology.orchestra.run.vm03.stdout: bzip2-1.0.8-10.el9_5.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: c-ares-1.19.1-2.el9_4.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-grafana-dashboards-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-immutable-object-cache-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-diskprediction-local-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 
INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: cephadm-2:20.2.0-8.g0597158282e.el9.clyso.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: cryptsetup-2.7.2-4.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: fuse-2.9.9-17.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: gperftools-libs-2.9.1-3.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: grpc-data-1.46.7-10.el9.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: ledmon-libs-1.1.0-3.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libarrow-9.0.0-15.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libarrow-doc-9.0.0-15.el9.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs-daemon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs-devel-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs-proxy2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libcephfs2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libcephsqlite-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libconfig-1.7.2-9.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libgfortran-11.5.0-11.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libnbd-1.20.3-4.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: liboath-2.6.12-1.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libpmemobj-1.12.1-1.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libquadmath-11.5.0-11.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: librabbitmq-0.11.0-7.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: librados-devel-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: 
libradosstriper1-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: librdkafka-1.6.1-102.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: librgw2-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libstoragemgmt-1.10.1-1.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libunwind-1.6.2-1.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: libxslt-1.1.34-13.el9_6.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: lmdb-libs-0.9.29-3.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: lttng-ust-2.12.0-6.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: lua-5.4.4-4.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: lua-devel-5.4.4-4.el9.x86_64 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: luarocks-3.9.2-5.el9.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: mailcap-2.1.49-5.el9.0.2.noarch 2026-04-01T02:24:02.806 INFO:teuthology.orchestra.run.vm03.stdout: nvme-cli-2.13-1.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: openblas-0.3.29-1.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: openblas-openmp-0.3.29-1.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: parquet-libs-9.0.0-15.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: pciutils-3.7.0-7.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: perl-Benchmark-1.23-481.1.el9_6.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: perl-Test-Harness-1:3.42-461.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: protobuf-3.14.0-17.el9_7.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: protobuf-compiler-3.14.0-17.el9_7.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-asyncssh-2.13.2-5.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-autocommand-2.2.2-8.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-babel-2.9.1-2.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-bcrypt-3.2.2-1.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-cachetools-4.2.4-1.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-ceph-argparse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-cephfs-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-certifi-2023.05.07-4.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-cffi-1.14.5-5.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-cheroot-10.0.1-5.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-cherrypy-18.10.0-5.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: 
python3-cryptography-36.0.1-5.el9_6.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-devel-3.9.23-2.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-google-auth-1:2.45.0-1.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-grpcio-1.46.7-10.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-influxdb-5.3.1-1.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-isodate-0.6.1-3.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-8.2.1-3.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-context-6.0.1-3.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-text-4.0.0-2.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jinja2-2.11.3-8.el9_5.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-jmespath-1.0.1-1.el9_7.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-lxml-4.6.5-3.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-markupsafe-1.1.1-12.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-more-itertools-8.12.0-2.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-msgpack-1.0.3-2.el9.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-natsort-7.1.1-5.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-numpy-1:1.23.5-2.el9_7.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-packaging-20.9-5.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-ply-3.11-14.el9.0.1.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-portend-3.1.0-2.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-protobuf-3.14.0-17.el9_7.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyasn1-0.4.8-7.el9_7.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyasn1-modules-0.4.8-7.el9_7.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-pycparser-2.20-6.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyparsing-2.4.7-9.el9.0.1.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-rados-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 
2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-rbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-repoze-lru-0.7-16.el9.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-requests-2.25.1-10.el9_6.noarch 2026-04-01T02:24:02.807 INFO:teuthology.orchestra.run.vm03.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-rgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-routes-2.5.1-5.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-rsa-4.9-2.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-saml-1.16.0-1.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-scipy-1.9.3-2.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-tempora-5.0.0-2.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-toml-0.10.2-6.el9.0.1.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-typing-extensions-4.15.0-1.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-urllib3-1.26.5-6.el9_7.1.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-websocket-client-1.2.3-2.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-xmlsec-1.3.13-1.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-xmltodict-0.12.0-15.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: python3-zc-lockfile-2.0-10.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: qatlib-24.09.0-1.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: qatlib-service-24.09.0-1.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: qatzip-libs-1.3.1-1.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: rbd-fuse-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: rbd-nbd-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: re2-1:20211101-20.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: s3cmd-2.4.0-1.el9.noarch 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: smartmontools-1:7.2-9.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: socat-1.7.4.1-8.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: thrift-0.15.0-4.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: unzip-6.0-59.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: xmlsec1-1.2.29-13.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: xmlstarlet-1.6.1-20.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: zip-3.0-35.el9.x86_64 2026-04-01T02:24:02.808 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:02.808 
INFO:teuthology.orchestra.run.vm03.stdout:Complete! 2026-04-01T02:24:02.910 DEBUG:teuthology.parallel:result is None 2026-04-01T02:24:02.910 INFO:teuthology.task.install:Skipping version verification because we have custom repos... 2026-04-01T02:24:02.910 INFO:teuthology.task.install:Skipping version verification because we have custom repos... 2026-04-01T02:24:02.910 INFO:teuthology.task.install:Skipping version verification because we have custom repos... 2026-04-01T02:24:02.911 INFO:teuthology.task.install.util:Shipping valgrind.supp... 2026-04-01T02:24:02.911 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:02.911 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/home/ubuntu/cephtest/valgrind.supp 2026-04-01T02:24:02.939 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:02.939 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/home/ubuntu/cephtest/valgrind.supp 2026-04-01T02:24:02.970 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:24:02.970 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/home/ubuntu/cephtest/valgrind.supp 2026-04-01T02:24:03.002 INFO:teuthology.task.install.util:Shipping 'daemon-helper'... 2026-04-01T02:24:03.003 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:03.003 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/usr/bin/daemon-helper 2026-04-01T02:24:03.028 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod a=rx -- /usr/bin/daemon-helper 2026-04-01T02:24:03.094 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:03.094 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/usr/bin/daemon-helper 2026-04-01T02:24:03.122 DEBUG:teuthology.orchestra.run.vm06:> sudo chmod a=rx -- /usr/bin/daemon-helper 2026-04-01T02:24:03.187 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:24:03.187 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/usr/bin/daemon-helper 2026-04-01T02:24:03.217 DEBUG:teuthology.orchestra.run.vm08:> sudo chmod a=rx -- /usr/bin/daemon-helper 2026-04-01T02:24:03.287 INFO:teuthology.task.install.util:Shipping 'adjust-ulimits'... 2026-04-01T02:24:03.287 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:03.287 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/usr/bin/adjust-ulimits 2026-04-01T02:24:03.317 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod a=rx -- /usr/bin/adjust-ulimits 2026-04-01T02:24:03.385 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:03.385 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/usr/bin/adjust-ulimits 2026-04-01T02:24:03.409 DEBUG:teuthology.orchestra.run.vm06:> sudo chmod a=rx -- /usr/bin/adjust-ulimits 2026-04-01T02:24:03.474 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:24:03.475 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/usr/bin/adjust-ulimits 2026-04-01T02:24:03.499 DEBUG:teuthology.orchestra.run.vm08:> sudo chmod a=rx -- /usr/bin/adjust-ulimits 2026-04-01T02:24:03.564 INFO:teuthology.task.install.util:Shipping 'stdin-killer'... 
2026-04-01T02:24:03.564 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:03.564 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/usr/bin/stdin-killer 2026-04-01T02:24:03.590 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod a=rx -- /usr/bin/stdin-killer 2026-04-01T02:24:03.655 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:03.655 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/usr/bin/stdin-killer 2026-04-01T02:24:03.678 DEBUG:teuthology.orchestra.run.vm06:> sudo chmod a=rx -- /usr/bin/stdin-killer 2026-04-01T02:24:03.739 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:24:03.739 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/usr/bin/stdin-killer 2026-04-01T02:24:03.767 DEBUG:teuthology.orchestra.run.vm08:> sudo chmod a=rx -- /usr/bin/stdin-killer 2026-04-01T02:24:03.837 INFO:teuthology.run_tasks:Running task ceph... 2026-04-01T02:24:03.876 INFO:tasks.ceph:Making ceph log dir writeable by non-root... 2026-04-01T02:24:03.876 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod 777 /var/log/ceph 2026-04-01T02:24:03.877 DEBUG:teuthology.orchestra.run.vm06:> sudo chmod 777 /var/log/ceph 2026-04-01T02:24:03.879 DEBUG:teuthology.orchestra.run.vm08:> sudo chmod 777 /var/log/ceph 2026-04-01T02:24:03.908 INFO:tasks.ceph:Disabling ceph logrotate... 2026-04-01T02:24:03.908 DEBUG:teuthology.orchestra.run.vm03:> sudo rm -f -- /etc/logrotate.d/ceph 2026-04-01T02:24:03.944 DEBUG:teuthology.orchestra.run.vm06:> sudo rm -f -- /etc/logrotate.d/ceph 2026-04-01T02:24:03.950 DEBUG:teuthology.orchestra.run.vm08:> sudo rm -f -- /etc/logrotate.d/ceph 2026-04-01T02:24:03.978 INFO:tasks.ceph:Creating extra log directories... 2026-04-01T02:24:03.978 DEBUG:teuthology.orchestra.run.vm03:> sudo install -d -m0777 -- /var/log/ceph/valgrind /var/log/ceph/profiling-logger 2026-04-01T02:24:04.012 DEBUG:teuthology.orchestra.run.vm06:> sudo install -d -m0777 -- /var/log/ceph/valgrind /var/log/ceph/profiling-logger 2026-04-01T02:24:04.019 DEBUG:teuthology.orchestra.run.vm08:> sudo install -d -m0777 -- /var/log/ceph/valgrind /var/log/ceph/profiling-logger 2026-04-01T02:24:04.051 INFO:tasks.ceph:Creating ceph cluster ceph... 
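[editor's note] The "Shipping ..." steps above copy small helper files (valgrind.supp, daemon-helper, adjust-ulimits, stdin-killer) to every node by piping the file content into 'sudo dd of=<dest>' over SSH and then marking the result executable with 'chmod a=rx'. A minimal sketch of that dd/chmod idiom in Python, assuming passwordless SSH and sudo; host and path names are illustrative and this is not teuthology's actual implementation:

    import subprocess

    def ship_file(host, local_path, remote_path, executable=True):
        """Copy a small file to a remote host by streaming it into 'sudo dd of=...',
        mirroring the dd/chmod pattern visible in the log above."""
        with open(local_path, "rb") as src:
            # dd writes whatever arrives on stdin to remote_path on the remote host.
            subprocess.run(["ssh", host, "sudo", "dd", "of=" + remote_path],
                           stdin=src, check=True)
        if executable:
            # World-readable and executable, as done for the helpers above.
            subprocess.run(["ssh", host, "sudo", "chmod", "a=rx", "--", remote_path],
                           check=True)

    # Hypothetical usage matching one of the helpers shipped above:
    # ship_file("vm03.local", "./daemon-helper", "/usr/bin/daemon-helper")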
2026-04-01T02:24:04.051 INFO:tasks.ceph:config {'conf': {'client': {'debug rgw': 20, 'debug rgw dedup': 20, 'setgroup': 'ceph', 'setuser': 'ceph'}, 'global': {'osd_max_pg_log_entries': 10, 'osd_min_pg_log_entries': 10}, 'mgr': {'debug mgr': 20, 'debug ms': 1}, 'mon': {'debug mon': 20, 'debug ms': 1, 'debug paxos': 20}, 'osd': {'bdev async discard': True, 'bdev enable discard': True, 'bluestore allocator': 'bitmap', 'bluestore block size': 96636764160, 'bluestore fsck on mount': True, 'debug bluefs': '1/20', 'debug bluestore': '1/20', 'debug ms': 1, 'debug osd': 20, 'debug rocksdb': '4/10', 'mon osd backfillfull_ratio': 0.85, 'mon osd full ratio': 0.9, 'mon osd nearfull ratio': 0.8, 'osd failsafe full ratio': 0.95, 'osd mclock iops capacity threshold hdd': 49000, 'osd objectstore': 'bluestore', 'osd shutdown pgref assert': True}}, 'fs': 'xfs', 'mkfs_options': None, 'mount_options': None, 'skip_mgr_daemons': False, 'log_ignorelist': ['\\(MDS_ALL_DOWN\\)', '\\(MDS_UP_LESS_THAN_MAX\\)', '\\(PG_AVAILABILITY\\)', '\\(PG_DEGRADED\\)', '\\(POOL_APP_NOT_ENABLED\\)', 'not have an application enabled'], 'cpu_profile': set(), 'cluster': 'ceph', 'mon_bind_msgr2': True, 'mon_bind_addrvec': True} 2026-04-01T02:24:04.051 INFO:tasks.ceph:ctx.config {'archive_path': '/archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640', 'branch': 'wip-sse-s3-on-v20.2.0', 'description': 'rgw/dedup/{beast bluestore-bitmap fixed-3-rgw ignore-pg-availability overrides supported-distros/{rocky_latest} tasks/{0-install test_dedup}}', 'email': None, 'first_in_suite': False, 'flavor': 'default', 'job_id': '4640', 'last_in_suite': False, 'machine_type': 'vps', 'name': 'supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps', 'no_nested_subset': False, 'openstack': [{'volumes': {'count': 4, 'size': 10}}], 'os_type': 'rocky', 'os_version': '9.7', 'overrides': {'admin_socket': {'branch': 'wip-sse-s3-on-v20.2.0'}, 'ansible.cephlab': {'branch': 'main', 'repo': 'https://github.com/kshtsk/ceph-cm-ansible.git', 'skip_tags': 'nagios,monitoring-scripts,hostname,pubkeys,zap,sudoers,kerberos,ntp-client,resolvconf,cpan,nfs', 'vars': {'logical_volumes': {'lv_1': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_2': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_3': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_4': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}}, 'timezone': 'UTC', 'volume_groups': {'vg_nvme': {'pvs': '/dev/vdb,/dev/vdc,/dev/vdd,/dev/vde'}}}}, 'ceph': {'conf': {'client': {'debug rgw': 20, 'debug rgw dedup': 20, 'setgroup': 'ceph', 'setuser': 'ceph'}, 'global': {'osd_max_pg_log_entries': 10, 'osd_min_pg_log_entries': 10}, 'mgr': {'debug mgr': 20, 'debug ms': 1}, 'mon': {'debug mon': 20, 'debug ms': 1, 'debug paxos': 20}, 'osd': {'bdev async discard': True, 'bdev enable discard': True, 'bluestore allocator': 'bitmap', 'bluestore block size': 96636764160, 'bluestore fsck on mount': True, 'debug bluefs': '1/20', 'debug bluestore': '1/20', 'debug ms': 1, 'debug osd': 20, 'debug rocksdb': '4/10', 'mon osd backfillfull_ratio': 0.85, 'mon osd full ratio': 0.9, 'mon osd nearfull ratio': 0.8, 'osd failsafe full ratio': 0.95, 'osd mclock iops capacity threshold hdd': 49000, 'osd objectstore': 'bluestore', 'osd shutdown pgref assert': True}}, 'flavor': 'default', 'fs': 'xfs', 'log-ignorelist': ['\\(MDS_ALL_DOWN\\)', '\\(MDS_UP_LESS_THAN_MAX\\)', '\\(PG_AVAILABILITY\\)', '\\(PG_DEGRADED\\)', '\\(POOL_APP_NOT_ENABLED\\)', 'not have an application 
enabled'], 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce'}, 'ceph-deploy': {'bluestore': True, 'conf': {'client': {'log file': '/var/log/ceph/ceph-$name.$pid.log'}, 'mon': {}, 'osd': {'bdev async discard': True, 'bdev enable discard': True, 'bluestore block size': 96636764160, 'bluestore fsck on mount': True, 'debug bluefs': '1/20', 'debug bluestore': '1/20', 'debug rocksdb': '4/10', 'mon osd backfillfull_ratio': 0.85, 'mon osd full ratio': 0.9, 'mon osd nearfull ratio': 0.8, 'osd failsafe full ratio': 0.95, 'osd objectstore': 'bluestore'}}, 'fs': 'xfs'}, 'cephadm': {'cephadm_binary_url': 'https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm', 'containers': {'image': 'harbor.clyso.com/custom-ceph/ceph/ceph:sse-s3-kmip-preview-not-for-production-1'}}, 'install': {'ceph': {'flavor': 'default', 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce'}, 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'repos': [{'name': 'ceph-source', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/SRPMS'}, {'name': 'ceph-noarch', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/noarch'}, {'name': 'ceph', 'priority': 1, 'url': 'https://s3.clyso.com/ces-packages/components/ceph-debug/rpm-20.2.0-8-g0597158282e/el9.clyso/x86_64'}]}, 'rgw': {'frontend': 'beast', 'storage classes': {'FROZEN': None, 'LUKEWARM': None}}, 'selinux': {'allowlist': ['scontext=system_u:system_r:getty_t:s0']}, 'thrashosds': {'bdev_inject_crash': 2, 'bdev_inject_crash_probability': 0.5}, 'workunit': {'branch': 'tt-20.2.0-sse-s3-kmip-preview-not-for-production-1', 'sha1': '99e8bef8f767b591604d6078b7861a00c2936d53'}}, 'owner': 'supriti', 'priority': 1000, 'repo': 'https://github.com/ceph/ceph.git', 'roles': [['mon.a', 'mon.c', 'mgr.y', 'osd.0', 'osd.1', 'osd.2', 'osd.3', 'client.0'], ['mon.b', 'mgr.x', 'osd.4', 'osd.5', 'osd.6', 'osd.7', 'client.1'], ['client.2']], 'seed': 3272, 'sha1': '0597158282e6d69429e60df2354a6c8eed0e5bce', 'sleep_before_teardown': 0, 'suite': 'rgw', 'suite_branch': 'tt-20.2.0-sse-s3-kmip-preview-not-for-production-1', 'suite_path': '/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa', 'suite_relpath': 'qa', 'suite_repo': 'http://git.local/ceph.git', 'suite_sha1': '99e8bef8f767b591604d6078b7861a00c2936d53', 'targets': {'vm03.local': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJZwDUOh3zvib7TVzo4Y24n4OUReeoEY1l5B0ITkmg1Alqtlro/JVK/7fS22qcxfbF2hh6yVUub8V06OJzE5OGQ=', 'vm06.local': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJufWi95YzKp8OS5O6guCB6+nO+jN5Mpb2ZbTmlHjYf0yAxVcP0LQ3WkYeow+7e1jyetVfeP7zc+9ymQ28vOd6w=', 'vm08.local': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBCc0oOROpmKveC/LHgE0rrQ1CSaqp95S2yb7Ecx/0YH8RexBcAcwxPSmfLVB15OIYJQkCjzsm+mFQBHlSZ5xiBc='}, 'tasks': [{'internal.save_config': None}, {'internal.check_lock': None}, {'internal.add_remotes': None}, {'console_log': None}, {'internal.connect': None}, {'internal.push_inventory': None}, {'internal.serialize_remote_roles': None}, {'internal.check_conflict': None}, {'internal.check_ceph_data': None}, {'internal.vm_setup': None}, {'internal.base': None}, {'internal.archive_upload': None}, {'internal.archive': None}, {'internal.coredump': None}, {'internal.sudo': None}, {'internal.syslog': None}, 
{'internal.timer': None}, {'pcp': None}, {'selinux': None}, {'ansible.cephlab': None}, {'clock': None}, {'install': None}, {'ceph': None}, {'openssl_keys': None}, {'rgw': ['client.0', 'client.1', 'client.2']}, {'tox': ['client.0']}, {'tox': ['client.0']}, {'dedup-tests': {'client.0': {'rgw_server': 'client.0'}}}], 'teuthology': {'fragments_dropped': [], 'meta': {}, 'postmerge': []}, 'teuthology_branch': 'uv2', 'teuthology_repo': 'https://github.com/kshtsk/teuthology', 'teuthology_sha1': 'a59626679648f962bca99d20d35578f2998c8f37', 'timestamp': '2026-03-31_23:51:22', 'tube': 'vps', 'user': 'supriti', 'verbose': False, 'worker_log': '/home/teuthos/.teuthology/dispatcher/dispatcher.vps.282426'} 2026-04-01T02:24:04.051 DEBUG:teuthology.orchestra.run.vm03:> install -d -m0755 -- /home/ubuntu/cephtest/ceph.data 2026-04-01T02:24:04.083 DEBUG:teuthology.orchestra.run.vm06:> install -d -m0755 -- /home/ubuntu/cephtest/ceph.data 2026-04-01T02:24:04.088 DEBUG:teuthology.orchestra.run.vm08:> install -d -m0755 -- /home/ubuntu/cephtest/ceph.data 2026-04-01T02:24:04.107 DEBUG:teuthology.orchestra.run.vm03:> sudo install -d -m0777 -- /var/run/ceph 2026-04-01T02:24:04.143 DEBUG:teuthology.orchestra.run.vm06:> sudo install -d -m0777 -- /var/run/ceph 2026-04-01T02:24:04.147 DEBUG:teuthology.orchestra.run.vm08:> sudo install -d -m0777 -- /var/run/ceph 2026-04-01T02:24:04.173 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:04.173 DEBUG:teuthology.orchestra.run.vm03:> dd if=/scratch_devs of=/dev/stdout 2026-04-01T02:24:04.229 DEBUG:teuthology.misc:devs=['/dev/vg_nvme/lv_1', '/dev/vg_nvme/lv_2', '/dev/vg_nvme/lv_3', '/dev/vg_nvme/lv_4'] 2026-04-01T02:24:04.229 DEBUG:teuthology.orchestra.run.vm03:> stat /dev/vg_nvme/lv_1 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout: File: /dev/vg_nvme/lv_1 -> ../dm-0 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout:Device: 5h/5d Inode: 1067 Links: 1 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout:Access: 2026-04-01 02:24:01.400379252 +0000 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout:Modify: 2026-04-01 02:23:39.123359448 +0000 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout:Change: 2026-04-01 02:23:39.123359448 +0000 2026-04-01T02:24:04.286 INFO:teuthology.orchestra.run.vm03.stdout: Birth: 2026-04-01 02:23:39.123359448 +0000 2026-04-01T02:24:04.286 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/dev/vg_nvme/lv_1 of=/dev/null count=1 2026-04-01T02:24:04.352 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records in 2026-04-01T02:24:04.352 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records out 2026-04-01T02:24:04.352 INFO:teuthology.orchestra.run.vm03.stderr:512 bytes copied, 0.000169357 s, 3.0 MB/s 2026-04-01T02:24:04.353 DEBUG:teuthology.orchestra.run.vm03:> ! 
mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_1 2026-04-01T02:24:04.411 DEBUG:teuthology.orchestra.run.vm03:> stat /dev/vg_nvme/lv_2 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout: File: /dev/vg_nvme/lv_2 -> ../dm-1 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout:Device: 5h/5d Inode: 1057 Links: 1 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout:Access: 2026-04-01 02:24:01.400379252 +0000 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout:Modify: 2026-04-01 02:23:39.114359440 +0000 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout:Change: 2026-04-01 02:23:39.114359440 +0000 2026-04-01T02:24:04.469 INFO:teuthology.orchestra.run.vm03.stdout: Birth: 2026-04-01 02:23:39.114359440 +0000 2026-04-01T02:24:04.469 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/dev/vg_nvme/lv_2 of=/dev/null count=1 2026-04-01T02:24:04.536 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records in 2026-04-01T02:24:04.537 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records out 2026-04-01T02:24:04.537 INFO:teuthology.orchestra.run.vm03.stderr:512 bytes copied, 0.000241812 s, 2.1 MB/s 2026-04-01T02:24:04.538 DEBUG:teuthology.orchestra.run.vm03:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_2 2026-04-01T02:24:04.595 DEBUG:teuthology.orchestra.run.vm03:> stat /dev/vg_nvme/lv_3 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout: File: /dev/vg_nvme/lv_3 -> ../dm-2 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout:Device: 5h/5d Inode: 1074 Links: 1 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout:Access: 2026-04-01 02:24:01.401379254 +0000 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout:Modify: 2026-04-01 02:23:39.125359450 +0000 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout:Change: 2026-04-01 02:23:39.125359450 +0000 2026-04-01T02:24:04.651 INFO:teuthology.orchestra.run.vm03.stdout: Birth: 2026-04-01 02:23:39.125359450 +0000 2026-04-01T02:24:04.651 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/dev/vg_nvme/lv_3 of=/dev/null count=1 2026-04-01T02:24:04.715 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records in 2026-04-01T02:24:04.715 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records out 2026-04-01T02:24:04.715 INFO:teuthology.orchestra.run.vm03.stderr:512 bytes copied, 9.4837e-05 s, 5.4 MB/s 2026-04-01T02:24:04.716 DEBUG:teuthology.orchestra.run.vm03:> ! 
mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_3 2026-04-01T02:24:04.777 DEBUG:teuthology.orchestra.run.vm03:> stat /dev/vg_nvme/lv_4 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout: File: /dev/vg_nvme/lv_4 -> ../dm-3 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout:Device: 5h/5d Inode: 1065 Links: 1 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout:Access: 2026-04-01 02:24:01.401379254 +0000 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout:Modify: 2026-04-01 02:23:39.116359442 +0000 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout:Change: 2026-04-01 02:23:39.116359442 +0000 2026-04-01T02:24:04.835 INFO:teuthology.orchestra.run.vm03.stdout: Birth: 2026-04-01 02:23:39.116359442 +0000 2026-04-01T02:24:04.835 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/dev/vg_nvme/lv_4 of=/dev/null count=1 2026-04-01T02:24:04.900 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records in 2026-04-01T02:24:04.900 INFO:teuthology.orchestra.run.vm03.stderr:1+0 records out 2026-04-01T02:24:04.900 INFO:teuthology.orchestra.run.vm03.stderr:512 bytes copied, 0.000170449 s, 3.0 MB/s 2026-04-01T02:24:04.901 DEBUG:teuthology.orchestra.run.vm03:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_4 2026-04-01T02:24:04.960 INFO:tasks.ceph:osd dev map: {'osd.0': '/dev/vg_nvme/lv_1', 'osd.1': '/dev/vg_nvme/lv_2', 'osd.2': '/dev/vg_nvme/lv_3', 'osd.3': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:04.960 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:04.960 DEBUG:teuthology.orchestra.run.vm06:> dd if=/scratch_devs of=/dev/stdout 2026-04-01T02:24:04.981 DEBUG:teuthology.misc:devs=['/dev/vg_nvme/lv_1', '/dev/vg_nvme/lv_2', '/dev/vg_nvme/lv_3', '/dev/vg_nvme/lv_4'] 2026-04-01T02:24:04.981 DEBUG:teuthology.orchestra.run.vm06:> stat /dev/vg_nvme/lv_1 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout: File: /dev/vg_nvme/lv_1 -> ../dm-0 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout:Device: 5h/5d Inode: 1039 Links: 1 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout:Access: 2026-04-01 02:23:56.658193943 +0000 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout:Modify: 2026-04-01 02:23:33.876193852 +0000 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout:Change: 2026-04-01 02:23:33.876193852 +0000 2026-04-01T02:24:05.038 INFO:teuthology.orchestra.run.vm06.stdout: Birth: 2026-04-01 02:23:33.876193852 +0000 2026-04-01T02:24:05.038 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/dev/vg_nvme/lv_1 of=/dev/null count=1 2026-04-01T02:24:05.100 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records in 2026-04-01T02:24:05.100 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records out 2026-04-01T02:24:05.100 INFO:teuthology.orchestra.run.vm06.stderr:512 bytes copied, 9.9427e-05 s, 5.1 MB/s 
2026-04-01T02:24:05.101 DEBUG:teuthology.orchestra.run.vm06:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_1 2026-04-01T02:24:05.159 DEBUG:teuthology.orchestra.run.vm06:> stat /dev/vg_nvme/lv_2 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout: File: /dev/vg_nvme/lv_2 -> ../dm-1 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout:Device: 5h/5d Inode: 1043 Links: 1 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout:Access: 2026-04-01 02:23:56.659193944 +0000 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout:Modify: 2026-04-01 02:23:33.876193852 +0000 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout:Change: 2026-04-01 02:23:33.876193852 +0000 2026-04-01T02:24:05.217 INFO:teuthology.orchestra.run.vm06.stdout: Birth: 2026-04-01 02:23:33.876193852 +0000 2026-04-01T02:24:05.217 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/dev/vg_nvme/lv_2 of=/dev/null count=1 2026-04-01T02:24:05.281 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records in 2026-04-01T02:24:05.281 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records out 2026-04-01T02:24:05.281 INFO:teuthology.orchestra.run.vm06.stderr:512 bytes copied, 0.000135244 s, 3.8 MB/s 2026-04-01T02:24:05.282 DEBUG:teuthology.orchestra.run.vm06:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_2 2026-04-01T02:24:05.343 DEBUG:teuthology.orchestra.run.vm06:> stat /dev/vg_nvme/lv_3 2026-04-01T02:24:05.399 INFO:teuthology.orchestra.run.vm06.stdout: File: /dev/vg_nvme/lv_3 -> ../dm-2 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout:Device: 5h/5d Inode: 1050 Links: 1 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout:Access: 2026-04-01 02:23:56.659193944 +0000 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout:Modify: 2026-04-01 02:23:33.880193846 +0000 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout:Change: 2026-04-01 02:23:33.880193846 +0000 2026-04-01T02:24:05.400 INFO:teuthology.orchestra.run.vm06.stdout: Birth: 2026-04-01 02:23:33.880193846 +0000 2026-04-01T02:24:05.400 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/dev/vg_nvme/lv_3 of=/dev/null count=1 2026-04-01T02:24:05.465 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records in 2026-04-01T02:24:05.466 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records out 2026-04-01T02:24:05.466 INFO:teuthology.orchestra.run.vm06.stderr:512 bytes copied, 0.000132898 s, 3.9 MB/s 2026-04-01T02:24:05.467 DEBUG:teuthology.orchestra.run.vm06:> ! 
mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_3 2026-04-01T02:24:05.523 DEBUG:teuthology.orchestra.run.vm06:> stat /dev/vg_nvme/lv_4 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout: File: /dev/vg_nvme/lv_4 -> ../dm-3 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout:Device: 5h/5d Inode: 1058 Links: 1 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root) 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout:Context: system_u:object_r:device_t:s0 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout:Access: 2026-04-01 02:23:56.659193944 +0000 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout:Modify: 2026-04-01 02:23:33.883193841 +0000 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout:Change: 2026-04-01 02:23:33.883193841 +0000 2026-04-01T02:24:05.578 INFO:teuthology.orchestra.run.vm06.stdout: Birth: 2026-04-01 02:23:33.883193841 +0000 2026-04-01T02:24:05.578 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/dev/vg_nvme/lv_4 of=/dev/null count=1 2026-04-01T02:24:05.640 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records in 2026-04-01T02:24:05.640 INFO:teuthology.orchestra.run.vm06.stderr:1+0 records out 2026-04-01T02:24:05.640 INFO:teuthology.orchestra.run.vm06.stderr:512 bytes copied, 0.000108573 s, 4.7 MB/s 2026-04-01T02:24:05.641 DEBUG:teuthology.orchestra.run.vm06:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_4 2026-04-01T02:24:05.701 INFO:tasks.ceph:osd dev map: {'osd.4': '/dev/vg_nvme/lv_1', 'osd.5': '/dev/vg_nvme/lv_2', 'osd.6': '/dev/vg_nvme/lv_3', 'osd.7': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:05.701 INFO:tasks.ceph:remote_to_roles_to_devs: {Remote(name='ubuntu@vm03.local'): {'osd.0': '/dev/vg_nvme/lv_1', 'osd.1': '/dev/vg_nvme/lv_2', 'osd.2': '/dev/vg_nvme/lv_3', 'osd.3': '/dev/vg_nvme/lv_4'}, Remote(name='ubuntu@vm06.local'): {'osd.4': '/dev/vg_nvme/lv_1', 'osd.5': '/dev/vg_nvme/lv_2', 'osd.6': '/dev/vg_nvme/lv_3', 'osd.7': '/dev/vg_nvme/lv_4'}} 2026-04-01T02:24:05.701 INFO:tasks.ceph:Generating config... 
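[editor's note] Before assigning OSDs, the ceph task reads the device list from /scratch_devs on each host and runs three checks per logical volume: stat (the path resolves to a dm-* device), a single-block 'dd' read (the device is readable), and a 'mount | grep -v devtmpfs' test (the device is not already mounted). The surviving devices are then paired positionally with that host's OSD roles, which is why osd.0-3 land on lv_1-4 of vm03 and osd.4-7 on lv_1-4 of vm06. A rough local sketch of that validation and mapping in Python, assuming sudo access to the devices; function and variable names are hypothetical:

    import subprocess

    def usable_scratch_devices(devices):
        """Keep the devices that pass the three checks seen in the log:
        path resolves (stat), first block readable (dd count=1),
        and not mounted outside devtmpfs."""
        mounts = subprocess.run(["mount"], capture_output=True, text=True,
                                check=True).stdout
        usable = []
        for dev in devices:
            subprocess.run(["stat", dev], check=True, capture_output=True)
            subprocess.run(["sudo", "dd", "if=" + dev, "of=/dev/null", "count=1"],
                           check=True, capture_output=True)
            mounted = any(dev in line for line in mounts.splitlines()
                          if "devtmpfs" not in line)
            if not mounted:
                usable.append(dev)
        return usable

    # Hypothetical usage reproducing the vm03 'osd dev map' printed above:
    devs = usable_scratch_devices(["/dev/vg_nvme/lv_1", "/dev/vg_nvme/lv_2",
                                   "/dev/vg_nvme/lv_3", "/dev/vg_nvme/lv_4"])
    osd_dev_map = dict(zip(["osd.0", "osd.1", "osd.2", "osd.3"], devs))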
2026-04-01T02:24:05.701 INFO:tasks.ceph:[client] debug rgw = 20 2026-04-01T02:24:05.701 INFO:tasks.ceph:[client] debug rgw dedup = 20 2026-04-01T02:24:05.701 INFO:tasks.ceph:[client] setgroup = ceph 2026-04-01T02:24:05.701 INFO:tasks.ceph:[client] setuser = ceph 2026-04-01T02:24:05.701 INFO:tasks.ceph:[global] osd_max_pg_log_entries = 10 2026-04-01T02:24:05.701 INFO:tasks.ceph:[global] osd_min_pg_log_entries = 10 2026-04-01T02:24:05.701 INFO:tasks.ceph:[mgr] debug mgr = 20 2026-04-01T02:24:05.701 INFO:tasks.ceph:[mgr] debug ms = 1 2026-04-01T02:24:05.701 INFO:tasks.ceph:[mon] debug mon = 20 2026-04-01T02:24:05.701 INFO:tasks.ceph:[mon] debug ms = 1 2026-04-01T02:24:05.701 INFO:tasks.ceph:[mon] debug paxos = 20 2026-04-01T02:24:05.701 INFO:tasks.ceph:[osd] bdev async discard = True 2026-04-01T02:24:05.701 INFO:tasks.ceph:[osd] bdev enable discard = True 2026-04-01T02:24:05.701 INFO:tasks.ceph:[osd] bluestore allocator = bitmap 2026-04-01T02:24:05.701 INFO:tasks.ceph:[osd] bluestore block size = 96636764160 2026-04-01T02:24:05.701 INFO:tasks.ceph:[osd] bluestore fsck on mount = True 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] debug bluefs = 1/20 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] debug bluestore = 1/20 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] debug ms = 1 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] debug osd = 20 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] debug rocksdb = 4/10 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] mon osd backfillfull_ratio = 0.85 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] mon osd full ratio = 0.9 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] mon osd nearfull ratio = 0.8 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] osd failsafe full ratio = 0.95 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] osd mclock iops capacity threshold hdd = 49000 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] osd objectstore = bluestore 2026-04-01T02:24:05.702 INFO:tasks.ceph:[osd] osd shutdown pgref assert = True 2026-04-01T02:24:05.702 INFO:tasks.ceph:Setting up mon.a... 2026-04-01T02:24:05.702 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --create-keyring /etc/ceph/ceph.keyring 2026-04-01T02:24:05.738 INFO:teuthology.orchestra.run.vm03.stdout:creating /etc/ceph/ceph.keyring 2026-04-01T02:24:05.741 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --gen-key --name=mon. 
/etc/ceph/ceph.keyring 2026-04-01T02:24:05.825 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod 0644 /etc/ceph/ceph.keyring 2026-04-01T02:24:05.852 DEBUG:tasks.ceph:Ceph mon addresses: [('mon.a', '192.168.123.103'), ('mon.c', '[v2:192.168.123.103:3301,v1:192.168.123.103:6790]'), ('mon.b', '192.168.123.106')] 2026-04-01T02:24:05.852 DEBUG:tasks.ceph:writing out conf {'global': {'chdir': '', 'pid file': '/var/run/ceph/$cluster-$name.pid', 'auth supported': 'cephx', 'filestore xattr use omap': 'true', 'mon clock drift allowed': '1.000', 'osd crush chooseleaf type': '0', 'auth debug': 'true', 'ms die on old message': 'true', 'ms die on bug': 'true', 'mon max pg per osd': '10000', 'mon pg warn max object skew': '0', 'osd_pool_default_pg_autoscale_mode': 'off', 'osd pool default size': '2', 'mon osd allow primary affinity': 'true', 'mon osd allow pg remap': 'true', 'mon warn on legacy crush tunables': 'false', 'mon warn on crush straw calc version zero': 'false', 'mon warn on no sortbitwise': 'false', 'mon warn on osd down out interval zero': 'false', 'mon warn on too few osds': 'false', 'mon_warn_on_pool_pg_num_not_power_of_two': 'false', 'mon_warn_on_pool_no_redundancy': 'false', 'mon_allow_pool_size_one': 'true', 'osd pool default erasure code profile': 'plugin=isa technique=reed_sol_van k=2 m=1 crush-failure-domain=osd', 'osd default data pool replay window': '5', 'mon allow pool delete': 'true', 'mon cluster log file level': 'debug', 'debug asserts on shutdown': 'true', 'mon health detail to clog': 'false', 'mon host': '192.168.123.103,[v2:192.168.123.103:3301,v1:192.168.123.103:6790],192.168.123.106', 'osd_max_pg_log_entries': 10, 'osd_min_pg_log_entries': 10}, 'osd': {'osd journal size': '100', 'osd scrub load threshold': '5.0', 'osd scrub max interval': '600', 'osd mclock profile': 'high_recovery_ops', 'osd recover clone overlap': 'true', 'osd recovery max chunk': '1048576', 'osd debug shutdown': 'true', 'osd debug op order': 'true', 'osd debug verify stray on activate': 'true', 'osd debug trim objects': 'true', 'osd open classes on start': 'true', 'osd debug pg log writeout': 'true', 'osd deep scrub update digest min age': '30', 'osd map max advance': '10', 'journal zero on create': 'true', 'filestore ondisk finisher threads': '3', 'filestore apply finisher threads': '3', 'bdev debug aio': 'true', 'osd debug misdirected ops': 'true', 'bdev async discard': True, 'bdev enable discard': True, 'bluestore allocator': 'bitmap', 'bluestore block size': 96636764160, 'bluestore fsck on mount': True, 'debug bluefs': '1/20', 'debug bluestore': '1/20', 'debug ms': 1, 'debug osd': 20, 'debug rocksdb': '4/10', 'mon osd backfillfull_ratio': 0.85, 'mon osd full ratio': 0.9, 'mon osd nearfull ratio': 0.8, 'osd failsafe full ratio': 0.95, 'osd mclock iops capacity threshold hdd': 49000, 'osd objectstore': 'bluestore', 'osd shutdown pgref assert': True}, 'mgr': {'debug ms': 1, 'debug mgr': 20, 'debug mon': '20', 'debug auth': '20', 'mon reweight min pgs per osd': '4', 'mon reweight min bytes per osd': '10', 'mgr/telemetry/nag': 'false'}, 'mon': {'debug ms': 1, 'debug mon': 20, 'debug paxos': 20, 'debug auth': '20', 'mon data avail warn': '5', 'mon mgr mkfs grace': '240', 'mon reweight min pgs per osd': '4', 'mon osd reporter subtree level': 'osd', 'mon osd prime pg temp': 'true', 'mon reweight min bytes per osd': '10', 'auth mon ticket ttl': '660', 'auth service ticket ttl': '240', 'mon_warn_on_insecure_global_id_reclaim': 'false', 'mon_warn_on_insecure_global_id_reclaim_allowed': 'false', 
'mon_down_mkfs_grace': '2m', 'mon_warn_on_filestore_osds': 'false'}, 'client': {'rgw cache enabled': 'true', 'rgw enable ops log': 'true', 'rgw enable usage log': 'true', 'log file': '/var/log/ceph/$cluster-$name.$pid.log', 'admin socket': '/var/run/ceph/$cluster-$name.$pid.asok', 'debug rgw': 20, 'debug rgw dedup': 20, 'setgroup': 'ceph', 'setuser': 'ceph'}, 'mon.a': {}, 'mon.c': {}, 'mon.b': {}} 2026-04-01T02:24:05.852 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:05.852 DEBUG:teuthology.orchestra.run.vm03:> dd of=/home/ubuntu/cephtest/ceph.tmp.conf 2026-04-01T02:24:05.908 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage monmaptool -c /home/ubuntu/cephtest/ceph.tmp.conf --create --clobber --enable-all-features --add a 192.168.123.103 --addv c '[v2:192.168.123.103:3301,v1:192.168.123.103:6790]' --add b 192.168.123.106 --print /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:monmaptool: monmap file /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:monmaptool: generated fsid 1338c6ab-9330-4cd3-91bf-71d5668f30ea 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:setting min_mon_release = tentacle 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:epoch 0 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:fsid 1338c6ab-9330-4cd3-91bf-71d5668f30ea 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:last_changed 2026-04-01T02:24:05.987203+0000 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:created 2026-04-01T02:24:05.987203+0000 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:min_mon_release 20 (tentacle) 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:election_strategy: 1 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:0: [v2:192.168.123.103:3300/0,v1:192.168.123.103:6789/0] mon.a 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:1: [v2:192.168.123.106:3300/0,v1:192.168.123.106:6789/0] mon.b 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:2: [v2:192.168.123.103:3301/0,v1:192.168.123.103:6790/0] mon.c 2026-04-01T02:24:05.988 INFO:teuthology.orchestra.run.vm03.stdout:monmaptool: writing epoch 0 to /home/ubuntu/cephtest/ceph.monmap (3 monitors) 2026-04-01T02:24:05.990 DEBUG:teuthology.orchestra.run.vm03:> rm -- /home/ubuntu/cephtest/ceph.tmp.conf 2026-04-01T02:24:06.045 INFO:tasks.ceph:Writing /etc/ceph/ceph.conf for FSID 1338c6ab-9330-4cd3-91bf-71d5668f30ea... 
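The monmap written above comes from monmaptool, fed the per-monitor addresses teuthology collected earlier: mon.a and mon.b are plain IPs added with --add, while mon.c carries an explicit v2/v1 addrvec and is added with --addv. A small sketch that rebuilds the same invocation (addresses and flags copied from this run; the surrounding Python is illustrative):

    import subprocess

    mon_addrs = {
        "a": "192.168.123.103",
        "c": "[v2:192.168.123.103:3301,v1:192.168.123.103:6790]",
        "b": "192.168.123.106",
    }

    cmd = ["monmaptool", "-c", "/home/ubuntu/cephtest/ceph.tmp.conf",
           "--create", "--clobber", "--enable-all-features"]
    for name, addr in mon_addrs.items():
        # bracketed addrvecs need --addv; bare IPs use --add
        cmd += ["--addv", name, addr] if addr.startswith("[") else ["--add", name, addr]
    cmd += ["--print", "/home/ubuntu/cephtest/ceph.monmap"]
    subprocess.run(cmd, check=True)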
2026-04-01T02:24:06.046 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /etc/ceph && sudo chmod 0755 /etc/ceph && sudo tee /etc/ceph/ceph.conf && sudo chmod 0644 /etc/ceph/ceph.conf > /dev/null 2026-04-01T02:24:06.088 DEBUG:teuthology.orchestra.run.vm06:> sudo mkdir -p /etc/ceph && sudo chmod 0755 /etc/ceph && sudo tee /etc/ceph/ceph.conf && sudo chmod 0644 /etc/ceph/ceph.conf > /dev/null 2026-04-01T02:24:06.089 DEBUG:teuthology.orchestra.run.vm08:> sudo mkdir -p /etc/ceph && sudo chmod 0755 /etc/ceph && sudo tee /etc/ceph/ceph.conf && sudo chmod 0644 /etc/ceph/ceph.conf > /dev/null 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout:[global] 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: chdir = "" 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: pid file = /var/run/ceph/$cluster-$name.pid 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: auth supported = cephx 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: filestore xattr use omap = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon clock drift allowed = 1.000 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: osd crush chooseleaf type = 0 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: auth debug = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: ms die on old message = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: ms die on bug = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon max pg per osd = 10000 # >= luminous 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon pg warn max object skew = 0 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: # disable pg_autoscaler by default for new pools 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: osd_pool_default_pg_autoscale_mode = off 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: osd pool default size = 2 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon osd allow primary affinity = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon osd allow pg remap = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon warn on legacy crush tunables = false 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon warn on crush straw calc version zero = false 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon warn on no sortbitwise = false 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon warn on osd down out interval zero = false 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon warn on too few osds = false 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon_warn_on_pool_pg_num_not_power_of_two = false 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon_warn_on_pool_no_redundancy = false 
2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon_allow_pool_size_one = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: osd pool default erasure code profile = plugin=isa technique=reed_sol_van k=2 m=1 crush-failure-domain=osd 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: osd default data pool replay window = 5 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon allow pool delete = true 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: mon cluster log file level = debug 2026-04-01T02:24:06.128 INFO:teuthology.orchestra.run.vm08.stdout: debug asserts on shutdown = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: mon health detail to clog = false 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: mon host = "192.168.123.103,[v2:192.168.123.103:3301,v1:192.168.123.103:6790],192.168.123.106" 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd_max_pg_log_entries = 10 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd_min_pg_log_entries = 10 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: fsid = 1338c6ab-9330-4cd3-91bf-71d5668f30ea 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout:[osd] 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd journal size = 100 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd scrub load threshold = 5.0 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd scrub max interval = 600 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd mclock profile = high_recovery_ops 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd recover clone overlap = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd recovery max chunk = 1048576 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd debug shutdown = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd debug op order = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd debug verify stray on activate = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd debug trim objects = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd open classes on start = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd debug pg log writeout = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd deep scrub update digest min age = 30 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd map max advance = 10 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 
INFO:teuthology.orchestra.run.vm08.stdout: journal zero on create = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: filestore ondisk finisher threads = 3 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: filestore apply finisher threads = 3 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: bdev debug aio = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd debug misdirected ops = true 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: bdev async discard = True 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: bdev enable discard = True 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: bluestore allocator = bitmap 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: bluestore block size = 96636764160 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: bluestore fsck on mount = True 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: debug bluefs = 1/20 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: debug bluestore = 1/20 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: debug ms = 1 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: debug osd = 20 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: debug rocksdb = 4/10 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: mon osd backfillfull_ratio = 0.85 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: mon osd full ratio = 0.9 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: mon osd nearfull ratio = 0.8 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd failsafe full ratio = 0.95 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd mclock iops capacity threshold hdd = 49000 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd objectstore = bluestore 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: osd shutdown pgref assert = True 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout:[mgr] 2026-04-01T02:24:06.129 INFO:teuthology.orchestra.run.vm08.stdout: debug ms = 1 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug mgr = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug mon = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug auth = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon reweight min pgs per osd = 4 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon reweight min bytes per osd = 10 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mgr/telemetry/nag = false 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout:[mon] 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug ms = 1 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug mon = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug paxos = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug auth = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon data avail warn = 5 
2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon mgr mkfs grace = 240 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon reweight min pgs per osd = 4 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon osd reporter subtree level = osd 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon osd prime pg temp = true 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon reweight min bytes per osd = 10 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: # rotate auth tickets quickly to exercise renewal paths 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: auth mon ticket ttl = 660 # 11m 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: auth service ticket ttl = 240 # 4m 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: # don't complain about insecure global_id in the test suite 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon_warn_on_insecure_global_id_reclaim = false 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon_warn_on_insecure_global_id_reclaim_allowed = false 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: # 1m isn't quite enough 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon_down_mkfs_grace = 2m 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: mon_warn_on_filestore_osds = false 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout:[client] 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: rgw cache enabled = true 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: rgw enable ops log = true 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: rgw enable usage log = true 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: log file = /var/log/ceph/$cluster-$name.$pid.log 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: admin socket = /var/run/ceph/$cluster-$name.$pid.asok 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug rgw = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: debug rgw dedup = 20 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: setgroup = ceph 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout: setuser = ceph 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout:[mon.a] 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout:[mon.c] 2026-04-01T02:24:06.130 INFO:teuthology.orchestra.run.vm08.stdout:[mon.b] 2026-04-01T02:24:06.133 INFO:teuthology.orchestra.run.vm06.stdout:[global] 2026-04-01T02:24:06.133 INFO:teuthology.orchestra.run.vm06.stdout: chdir = "" 2026-04-01T02:24:06.133 INFO:teuthology.orchestra.run.vm06.stdout: pid file = /var/run/ceph/$cluster-$name.pid 2026-04-01T02:24:06.133 INFO:teuthology.orchestra.run.vm06.stdout: auth supported = cephx 2026-04-01T02:24:06.133 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.133 INFO:teuthology.orchestra.run.vm06.stdout: filestore xattr use omap = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 
2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon clock drift allowed = 1.000 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd crush chooseleaf type = 0 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: auth debug = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: ms die on old message = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: ms die on bug = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon max pg per osd = 10000 # >= luminous 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon pg warn max object skew = 0 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: # disable pg_autoscaler by default for new pools 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd_pool_default_pg_autoscale_mode = off 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd pool default size = 2 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon osd allow primary affinity = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon osd allow pg remap = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon warn on legacy crush tunables = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon warn on crush straw calc version zero = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon warn on no sortbitwise = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon warn on osd down out interval zero = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon warn on too few osds = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon_warn_on_pool_pg_num_not_power_of_two = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon_warn_on_pool_no_redundancy = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon_allow_pool_size_one = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd pool default erasure code profile = plugin=isa technique=reed_sol_van k=2 m=1 crush-failure-domain=osd 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd default data pool replay window = 5 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon allow pool delete = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon cluster log file level = debug 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: debug asserts on shutdown = true 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon health detail to clog = false 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: mon host = 
"192.168.123.103,[v2:192.168.123.103:3301,v1:192.168.123.103:6790],192.168.123.106" 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd_max_pg_log_entries = 10 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd_min_pg_log_entries = 10 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: fsid = 1338c6ab-9330-4cd3-91bf-71d5668f30ea 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout:[osd] 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd journal size = 100 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd scrub load threshold = 5.0 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd scrub max interval = 600 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: osd mclock profile = high_recovery_ops 2026-04-01T02:24:06.134 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd recover clone overlap = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd recovery max chunk = 1048576 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd debug shutdown = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd debug op order = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd debug verify stray on activate = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd debug trim objects = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd open classes on start = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd debug pg log writeout = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd deep scrub update digest min age = 30 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd map max advance = 10 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: journal zero on create = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: filestore ondisk finisher threads = 3 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: filestore apply finisher threads = 3 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: bdev debug aio = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd debug misdirected ops = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: bdev async discard = True 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: bdev enable discard = True 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: bluestore allocator = bitmap 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: bluestore block size = 96636764160 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: bluestore fsck on mount = True 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 
debug bluefs = 1/20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug bluestore = 1/20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug ms = 1 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug osd = 20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug rocksdb = 4/10 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon osd backfillfull_ratio = 0.85 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon osd full ratio = 0.9 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon osd nearfull ratio = 0.8 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd failsafe full ratio = 0.95 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd mclock iops capacity threshold hdd = 49000 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd objectstore = bluestore 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: osd shutdown pgref assert = True 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout:[mgr] 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug ms = 1 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug mgr = 20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug mon = 20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug auth = 20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon reweight min pgs per osd = 4 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon reweight min bytes per osd = 10 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mgr/telemetry/nag = false 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout:[mon] 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug ms = 1 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug mon = 20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug paxos = 20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: debug auth = 20 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon data avail warn = 5 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon mgr mkfs grace = 240 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon reweight min pgs per osd = 4 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon osd reporter subtree level = osd 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon osd prime pg temp = true 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: mon reweight min bytes per osd = 10 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: # rotate auth tickets quickly to exercise renewal paths 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: auth mon ticket ttl = 660 # 11m 2026-04-01T02:24:06.135 INFO:teuthology.orchestra.run.vm06.stdout: auth service ticket ttl = 240 # 4m 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: # don't complain about insecure global_id in the test suite 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: 
mon_warn_on_insecure_global_id_reclaim = false 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: mon_warn_on_insecure_global_id_reclaim_allowed = false 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: # 1m isn't quite enough 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: mon_down_mkfs_grace = 2m 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: mon_warn_on_filestore_osds = false 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout:[client] 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: rgw cache enabled = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: rgw enable ops log = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: rgw enable usage log = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: log file = /var/log/ceph/$cluster-$name.$pid.log 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: admin socket = /var/run/ceph/$cluster-$name.$pid.asok 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: debug rgw = 20 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: debug rgw dedup = 20 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: setgroup = ceph 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout: setuser = ceph 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout:[mon.a] 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout:[mon.c] 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm06.stdout:[mon.b] 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout:[global] 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: chdir = "" 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: pid file = /var/run/ceph/$cluster-$name.pid 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: auth supported = cephx 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: filestore xattr use omap = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: mon clock drift allowed = 1.000 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: osd crush chooseleaf type = 0 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: auth debug = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: ms die on old message = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: ms die on bug = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: mon max pg per osd = 10000 # >= luminous 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: mon pg warn max object skew = 0 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: # disable pg_autoscaler by default for new pools 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: osd_pool_default_pg_autoscale_mode = off 
2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: osd pool default size = 2 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: mon osd allow primary affinity = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: mon osd allow pg remap = true 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: mon warn on legacy crush tunables = false 2026-04-01T02:24:06.136 INFO:teuthology.orchestra.run.vm03.stdout: mon warn on crush straw calc version zero = false 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon warn on no sortbitwise = false 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon warn on osd down out interval zero = false 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon warn on too few osds = false 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon_warn_on_pool_pg_num_not_power_of_two = false 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon_warn_on_pool_no_redundancy = false 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon_allow_pool_size_one = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd pool default erasure code profile = plugin=isa technique=reed_sol_van k=2 m=1 crush-failure-domain=osd 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd default data pool replay window = 5 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon allow pool delete = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon cluster log file level = debug 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: debug asserts on shutdown = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon health detail to clog = false 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon host = "192.168.123.103,[v2:192.168.123.103:3301,v1:192.168.123.103:6790],192.168.123.106" 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd_max_pg_log_entries = 10 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd_min_pg_log_entries = 10 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: fsid = 1338c6ab-9330-4cd3-91bf-71d5668f30ea 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout:[osd] 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd journal size = 100 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd scrub load threshold = 5.0 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd scrub max interval = 600 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd mclock profile = high_recovery_ops 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd recover clone overlap = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd recovery max chunk 
= 1048576 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd debug shutdown = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd debug op order = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd debug verify stray on activate = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd debug trim objects = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd open classes on start = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd debug pg log writeout = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd deep scrub update digest min age = 30 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd map max advance = 10 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: journal zero on create = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: filestore ondisk finisher threads = 3 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: filestore apply finisher threads = 3 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: bdev debug aio = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd debug misdirected ops = true 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: bdev async discard = True 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: bdev enable discard = True 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: bluestore allocator = bitmap 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: bluestore block size = 96636764160 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: bluestore fsck on mount = True 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: debug bluefs = 1/20 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: debug bluestore = 1/20 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: debug ms = 1 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: debug osd = 20 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: debug rocksdb = 4/10 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon osd backfillfull_ratio = 0.85 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon osd full ratio = 0.9 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: mon osd nearfull ratio = 0.8 2026-04-01T02:24:06.137 INFO:teuthology.orchestra.run.vm03.stdout: osd failsafe full ratio = 0.95 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: osd mclock iops capacity threshold hdd = 49000 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: osd objectstore = bluestore 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: osd shutdown pgref assert = True 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout:[mgr] 2026-04-01T02:24:06.138 
INFO:teuthology.orchestra.run.vm03.stdout: debug ms = 1 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug mgr = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug mon = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug auth = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon reweight min pgs per osd = 4 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon reweight min bytes per osd = 10 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mgr/telemetry/nag = false 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout:[mon] 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug ms = 1 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug mon = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug paxos = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug auth = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon data avail warn = 5 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon mgr mkfs grace = 240 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon reweight min pgs per osd = 4 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon osd reporter subtree level = osd 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon osd prime pg temp = true 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon reweight min bytes per osd = 10 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: # rotate auth tickets quickly to exercise renewal paths 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: auth mon ticket ttl = 660 # 11m 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: auth service ticket ttl = 240 # 4m 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: # don't complain about insecure global_id in the test suite 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon_warn_on_insecure_global_id_reclaim = false 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon_warn_on_insecure_global_id_reclaim_allowed = false 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: # 1m isn't quite enough 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon_down_mkfs_grace = 2m 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: mon_warn_on_filestore_osds = false 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout:[client] 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: rgw cache enabled = true 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: rgw enable ops log = true 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: rgw enable usage log = true 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: log file = /var/log/ceph/$cluster-$name.$pid.log 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: admin socket = /var/run/ceph/$cluster-$name.$pid.asok 
2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug rgw = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: debug rgw dedup = 20 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: setgroup = ceph 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout: setuser = ceph 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout:[mon.a] 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout:[mon.c] 2026-04-01T02:24:06.138 INFO:teuthology.orchestra.run.vm03.stdout:[mon.b] 2026-04-01T02:24:06.147 INFO:tasks.ceph:Creating admin key on mon.a... 2026-04-01T02:24:06.148 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --gen-key --name=client.admin --cap mon 'allow *' --cap osd 'allow *' --cap mds 'allow *' --cap mgr 'allow *' /etc/ceph/ceph.keyring 2026-04-01T02:24:06.201 INFO:tasks.ceph:Copying monmap to all nodes... 2026-04-01T02:24:06.201 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:06.201 DEBUG:teuthology.orchestra.run.vm03:> dd if=/etc/ceph/ceph.keyring of=/dev/stdout 2026-04-01T02:24:06.218 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:06.218 DEBUG:teuthology.orchestra.run.vm03:> dd if=/home/ubuntu/cephtest/ceph.monmap of=/dev/stdout 2026-04-01T02:24:06.275 INFO:tasks.ceph:Sending monmap to node ubuntu@vm03.local 2026-04-01T02:24:06.275 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:06.275 DEBUG:teuthology.orchestra.run.vm03:> sudo dd of=/etc/ceph/ceph.keyring 2026-04-01T02:24:06.275 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod 0644 /etc/ceph/ceph.keyring 2026-04-01T02:24:06.357 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:06.357 DEBUG:teuthology.orchestra.run.vm03:> dd of=/home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:06.414 INFO:tasks.ceph:Sending monmap to node ubuntu@vm06.local 2026-04-01T02:24:06.414 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:06.414 DEBUG:teuthology.orchestra.run.vm06:> sudo dd of=/etc/ceph/ceph.keyring 2026-04-01T02:24:06.414 DEBUG:teuthology.orchestra.run.vm06:> sudo chmod 0644 /etc/ceph/ceph.keyring 2026-04-01T02:24:06.450 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:06.450 DEBUG:teuthology.orchestra.run.vm06:> dd of=/home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:06.510 INFO:tasks.ceph:Sending monmap to node ubuntu@vm08.local 2026-04-01T02:24:06.510 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:24:06.510 DEBUG:teuthology.orchestra.run.vm08:> sudo dd of=/etc/ceph/ceph.keyring 2026-04-01T02:24:06.510 DEBUG:teuthology.orchestra.run.vm08:> sudo chmod 0644 /etc/ceph/ceph.keyring 2026-04-01T02:24:06.544 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:24:06.544 DEBUG:teuthology.orchestra.run.vm08:> dd of=/home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:06.600 INFO:tasks.ceph:Setting up mon nodes... 2026-04-01T02:24:06.601 INFO:tasks.ceph:Setting up mgr nodes... 
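The three near-identical dumps above are the same config dict rendered to INI form and written to /etc/ceph/ceph.conf on each remote through sudo tee; the keyring and monmap are then pushed the same way with dd. A minimal sketch of the dict-to-conf step, using the stdlib configparser as a stand-in for teuthology's own writer (the dict is abbreviated from the full one logged earlier):

    from configparser import ConfigParser
    import io

    conf = {
        "global": {
            "auth supported": "cephx",
            "osd pool default size": "2",
            "mon host": "192.168.123.103,[v2:192.168.123.103:3301,v1:192.168.123.103:6790],192.168.123.106",
            "fsid": "1338c6ab-9330-4cd3-91bf-71d5668f30ea",
        },
        "client": {
            "debug rgw": "20",
            "debug rgw dedup": "20",
            "setuser": "ceph",
            "setgroup": "ceph",
        },
        "mon.a": {}, "mon.c": {}, "mon.b": {},
    }

    cp = ConfigParser()
    cp.read_dict(conf)
    buf = io.StringIO()
    cp.write(buf)          # emits "[section]" headers and "key = value" lines as in the dump
    print(buf.getvalue())  # this text is what gets piped to `sudo tee /etc/ceph/ceph.conf`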
2026-04-01T02:24:06.601 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /var/lib/ceph/mgr/ceph-y && sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --create-keyring --gen-key --name=mgr.y /var/lib/ceph/mgr/ceph-y/keyring 2026-04-01T02:24:06.655 INFO:teuthology.orchestra.run.vm03.stdout:creating /var/lib/ceph/mgr/ceph-y/keyring 2026-04-01T02:24:06.658 DEBUG:teuthology.orchestra.run.vm06:> sudo mkdir -p /var/lib/ceph/mgr/ceph-x && sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --create-keyring --gen-key --name=mgr.x /var/lib/ceph/mgr/ceph-x/keyring 2026-04-01T02:24:06.708 INFO:teuthology.orchestra.run.vm06.stdout:creating /var/lib/ceph/mgr/ceph-x/keyring 2026-04-01T02:24:06.710 INFO:tasks.ceph:Setting up mds nodes... 2026-04-01T02:24:06.710 INFO:tasks.ceph_client:Setting up client nodes... 2026-04-01T02:24:06.710 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --create-keyring --gen-key --name=client.0 /etc/ceph/ceph.client.0.keyring && sudo chmod 0644 /etc/ceph/ceph.client.0.keyring 2026-04-01T02:24:06.754 INFO:teuthology.orchestra.run.vm03.stdout:creating /etc/ceph/ceph.client.0.keyring 2026-04-01T02:24:06.769 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --create-keyring --gen-key --name=client.1 /etc/ceph/ceph.client.1.keyring && sudo chmod 0644 /etc/ceph/ceph.client.1.keyring 2026-04-01T02:24:06.805 INFO:teuthology.orchestra.run.vm06.stdout:creating /etc/ceph/ceph.client.1.keyring 2026-04-01T02:24:06.817 DEBUG:teuthology.orchestra.run.vm08:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool --create-keyring --gen-key --name=client.2 /etc/ceph/ceph.client.2.keyring && sudo chmod 0644 /etc/ceph/ceph.client.2.keyring 2026-04-01T02:24:06.853 INFO:teuthology.orchestra.run.vm08.stdout:creating /etc/ceph/ceph.client.2.keyring 2026-04-01T02:24:06.865 INFO:tasks.ceph:Running mkfs on osd nodes... 
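Each mgr and client role gets a fresh cephx key through ceph-authtool, exactly as the vm03/vm06/vm08 commands above show: --create-keyring --gen-key --name=<entity> into the keyring path, followed by chmod 0644 for the client keyrings. A condensed sketch of the client part (client IDs taken from this run; the adjust-ulimits/ceph-coverage wrappers from the log are dropped for brevity):

    import subprocess

    for client_id in ("0", "1", "2"):   # client.0 on vm03, client.1 on vm06, client.2 on vm08
        keyring = f"/etc/ceph/ceph.client.{client_id}.keyring"
        subprocess.run(["sudo", "ceph-authtool", "--create-keyring", "--gen-key",
                        f"--name=client.{client_id}", keyring], check=True)
        subprocess.run(["sudo", "chmod", "0644", keyring], check=True)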
2026-04-01T02:24:06.866 INFO:tasks.ceph:ctx.disk_config.remote_to_roles_to_dev: {Remote(name='ubuntu@vm03.local'): {'osd.0': '/dev/vg_nvme/lv_1', 'osd.1': '/dev/vg_nvme/lv_2', 'osd.2': '/dev/vg_nvme/lv_3', 'osd.3': '/dev/vg_nvme/lv_4'}, Remote(name='ubuntu@vm06.local'): {'osd.4': '/dev/vg_nvme/lv_1', 'osd.5': '/dev/vg_nvme/lv_2', 'osd.6': '/dev/vg_nvme/lv_3', 'osd.7': '/dev/vg_nvme/lv_4'}} 2026-04-01T02:24:06.866 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /var/lib/ceph/osd/ceph-0 2026-04-01T02:24:06.895 INFO:tasks.ceph:roles_to_devs: {'osd.0': '/dev/vg_nvme/lv_1', 'osd.1': '/dev/vg_nvme/lv_2', 'osd.2': '/dev/vg_nvme/lv_3', 'osd.3': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:06.895 INFO:tasks.ceph:role: osd.0 2026-04-01T02:24:06.895 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_1 on ubuntu@vm03.local 2026-04-01T02:24:06.896 DEBUG:teuthology.orchestra.run.vm03:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_1 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout:meta-data=/dev/vg_nvme/lv_1 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:06.964 INFO:teuthology.orchestra.run.vm03.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:06.969 INFO:teuthology.orchestra.run.vm03.stdout:Discarding blocks...Done. 
2026-04-01T02:24:06.974 INFO:tasks.ceph:mount /dev/vg_nvme/lv_1 on ubuntu@vm03.local -o noatime 2026-04-01T02:24:06.974 DEBUG:teuthology.orchestra.run.vm03:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_1 /var/lib/ceph/osd/ceph-0 2026-04-01T02:24:07.046 DEBUG:teuthology.orchestra.run.vm03:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-0 2026-04-01T02:24:07.113 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /var/lib/ceph/osd/ceph-1 2026-04-01T02:24:07.177 INFO:tasks.ceph:roles_to_devs: {'osd.0': '/dev/vg_nvme/lv_1', 'osd.1': '/dev/vg_nvme/lv_2', 'osd.2': '/dev/vg_nvme/lv_3', 'osd.3': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:07.177 INFO:tasks.ceph:role: osd.1 2026-04-01T02:24:07.177 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_2 on ubuntu@vm03.local 2026-04-01T02:24:07.177 DEBUG:teuthology.orchestra.run.vm03:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_2 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout:meta-data=/dev/vg_nvme/lv_2 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:07.244 INFO:teuthology.orchestra.run.vm03.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:07.248 INFO:teuthology.orchestra.run.vm03.stdout:Discarding blocks...Done. 
2026-04-01T02:24:07.251 INFO:tasks.ceph:mount /dev/vg_nvme/lv_2 on ubuntu@vm03.local -o noatime 2026-04-01T02:24:07.251 DEBUG:teuthology.orchestra.run.vm03:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_2 /var/lib/ceph/osd/ceph-1 2026-04-01T02:24:07.325 DEBUG:teuthology.orchestra.run.vm03:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-1 2026-04-01T02:24:07.395 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /var/lib/ceph/osd/ceph-2 2026-04-01T02:24:07.461 INFO:tasks.ceph:roles_to_devs: {'osd.0': '/dev/vg_nvme/lv_1', 'osd.1': '/dev/vg_nvme/lv_2', 'osd.2': '/dev/vg_nvme/lv_3', 'osd.3': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:07.461 INFO:tasks.ceph:role: osd.2 2026-04-01T02:24:07.461 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_3 on ubuntu@vm03.local 2026-04-01T02:24:07.461 DEBUG:teuthology.orchestra.run.vm03:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_3 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout:meta-data=/dev/vg_nvme/lv_3 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:07.529 INFO:teuthology.orchestra.run.vm03.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:07.533 INFO:teuthology.orchestra.run.vm03.stdout:Discarding blocks...Done. 
2026-04-01T02:24:07.537 INFO:tasks.ceph:mount /dev/vg_nvme/lv_3 on ubuntu@vm03.local -o noatime 2026-04-01T02:24:07.537 DEBUG:teuthology.orchestra.run.vm03:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_3 /var/lib/ceph/osd/ceph-2 2026-04-01T02:24:07.610 DEBUG:teuthology.orchestra.run.vm03:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-2 2026-04-01T02:24:07.677 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /var/lib/ceph/osd/ceph-3 2026-04-01T02:24:07.745 INFO:tasks.ceph:roles_to_devs: {'osd.0': '/dev/vg_nvme/lv_1', 'osd.1': '/dev/vg_nvme/lv_2', 'osd.2': '/dev/vg_nvme/lv_3', 'osd.3': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:07.745 INFO:tasks.ceph:role: osd.3 2026-04-01T02:24:07.745 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_4 on ubuntu@vm03.local 2026-04-01T02:24:07.745 DEBUG:teuthology.orchestra.run.vm03:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_4 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout:meta-data=/dev/vg_nvme/lv_4 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:07.811 INFO:teuthology.orchestra.run.vm03.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:07.816 INFO:teuthology.orchestra.run.vm03.stdout:Discarding blocks...Done. 
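The per-OSD preparation repeating through these entries is the same three steps each time: mkfs.xfs -f -i size=2048 on the assigned logical volume, mount -t xfs -o noatime onto /var/lib/ceph/osd/ceph-<id>, then restorecon to restore the SELinux label on the mount point. A condensed sketch of that loop for vm03's role map (device map copied from this run's roles_to_devs; assumes it is run as root on the target node):

    import subprocess

    # osd id -> device, as logged in roles_to_devs for ubuntu@vm03.local
    osd_devs = {0: "/dev/vg_nvme/lv_1", 1: "/dev/vg_nvme/lv_2",
                2: "/dev/vg_nvme/lv_3", 3: "/dev/vg_nvme/lv_4"}

    for osd_id, dev in osd_devs.items():
        data_dir = f"/var/lib/ceph/osd/ceph-{osd_id}"
        subprocess.run(["mkdir", "-p", data_dir], check=True)
        subprocess.run(["mkfs.xfs", "-f", "-i", "size=2048", dev], check=True)
        subprocess.run(["mount", "-t", "xfs", "-o", "noatime", dev, data_dir], check=True)
        subprocess.run(["/sbin/restorecon", data_dir], check=True)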
2026-04-01T02:24:07.823 INFO:tasks.ceph:mount /dev/vg_nvme/lv_4 on ubuntu@vm03.local -o noatime 2026-04-01T02:24:07.823 DEBUG:teuthology.orchestra.run.vm03:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_4 /var/lib/ceph/osd/ceph-3 2026-04-01T02:24:07.898 DEBUG:teuthology.orchestra.run.vm03:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-3 2026-04-01T02:24:07.966 DEBUG:teuthology.orchestra.run.vm03:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 0 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:08.053 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.049+0000 7fb1a100c900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-0/keyring: can't open /var/lib/ceph/osd/ceph-0/keyring: (2) No such file or directory 2026-04-01T02:24:08.053 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.050+0000 7fb1a100c900 -1 created new key in keyring /var/lib/ceph/osd/ceph-0/keyring 2026-04-01T02:24:08.053 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.050+0000 7fb1a100c900 -1 bdev(0x561a54c61800 /var/lib/ceph/osd/ceph-0/block) open stat got: (1) Operation not permitted 2026-04-01T02:24:08.053 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.050+0000 7fb1a100c900 -1 bluestore(/var/lib/ceph/osd/ceph-0) _read_fsid unparsable uuid 2026-04-01T02:24:08.762 DEBUG:teuthology.orchestra.run.vm03:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-0 2026-04-01T02:24:08.789 DEBUG:teuthology.orchestra.run.vm03:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 1 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:08.876 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.873+0000 7fa57d427900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-1/keyring: can't open /var/lib/ceph/osd/ceph-1/keyring: (2) No such file or directory 2026-04-01T02:24:08.876 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.873+0000 7fa57d427900 -1 created new key in keyring /var/lib/ceph/osd/ceph-1/keyring 2026-04-01T02:24:08.876 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.873+0000 7fa57d427900 -1 bdev(0x560782ed3800 /var/lib/ceph/osd/ceph-1/block) open stat got: (1) Operation not permitted 2026-04-01T02:24:08.876 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:08.874+0000 7fa57d427900 -1 bluestore(/var/lib/ceph/osd/ceph-1) _read_fsid unparsable uuid 2026-04-01T02:24:09.546 DEBUG:teuthology.orchestra.run.vm03:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-1 2026-04-01T02:24:09.574 DEBUG:teuthology.orchestra.run.vm03:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 2 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:09.661 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:09.658+0000 7f5cacdc8900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-2/keyring: can't open /var/lib/ceph/osd/ceph-2/keyring: (2) No such file or directory 2026-04-01T02:24:09.661 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:09.658+0000 7f5cacdc8900 -1 created new key in keyring /var/lib/ceph/osd/ceph-2/keyring 2026-04-01T02:24:09.661 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:09.658+0000 7f5cacdc8900 -1 bdev(0x5635aac93800 /var/lib/ceph/osd/ceph-2/block) open stat got: (1) Operation not permitted 
2026-04-01T02:24:09.661 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:09.659+0000 7f5cacdc8900 -1 bluestore(/var/lib/ceph/osd/ceph-2) _read_fsid unparsable uuid 2026-04-01T02:24:10.337 DEBUG:teuthology.orchestra.run.vm03:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-2 2026-04-01T02:24:10.367 DEBUG:teuthology.orchestra.run.vm03:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 3 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:10.457 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:10.454+0000 7effebc82900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-3/keyring: can't open /var/lib/ceph/osd/ceph-3/keyring: (2) No such file or directory 2026-04-01T02:24:10.457 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:10.454+0000 7effebc82900 -1 created new key in keyring /var/lib/ceph/osd/ceph-3/keyring 2026-04-01T02:24:10.457 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:10.454+0000 7effebc82900 -1 bdev(0x55bdccacd800 /var/lib/ceph/osd/ceph-3/block) open stat got: (1) Operation not permitted 2026-04-01T02:24:10.457 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:10.454+0000 7effebc82900 -1 bluestore(/var/lib/ceph/osd/ceph-3) _read_fsid unparsable uuid 2026-04-01T02:24:11.101 DEBUG:teuthology.orchestra.run.vm03:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-3 2026-04-01T02:24:11.129 DEBUG:teuthology.orchestra.run.vm06:> sudo mkdir -p /var/lib/ceph/osd/ceph-4 2026-04-01T02:24:11.157 INFO:tasks.ceph:roles_to_devs: {'osd.4': '/dev/vg_nvme/lv_1', 'osd.5': '/dev/vg_nvme/lv_2', 'osd.6': '/dev/vg_nvme/lv_3', 'osd.7': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:11.157 INFO:tasks.ceph:role: osd.4 2026-04-01T02:24:11.157 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_1 on ubuntu@vm06.local 2026-04-01T02:24:11.157 DEBUG:teuthology.orchestra.run.vm06:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_1 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout:meta-data=/dev/vg_nvme/lv_1 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:11.226 INFO:teuthology.orchestra.run.vm06.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:11.227 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:11.227 INFO:teuthology.orchestra.run.vm06.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:11.231 INFO:teuthology.orchestra.run.vm06.stdout:Discarding blocks...Done. 
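Each store is then initialized in place with ceph-osd --mkfs --mkkey. The stderr lines above (missing keyring, "Operation not permitted" on the block device, "_read_fsid unparsable uuid") appear to be benign on a first run against an empty directory: the keyring is created immediately afterwards and the fsid files are read back successfully later in the log. A sketch of the same sequence for osd.0, with the adjust-ulimits and ceph-coverage wrappers (teuthology instrumentation) omitted:

  # initialize the BlueStore OSD and generate its keyring (as logged)
  sudo MALLOC_CHECK_=3 ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey \
      -i 0 --monmap /home/ubuntu/cephtest/ceph.monmap
  # the mkfs runs as root, so hand the tree to the ceph user before starting the daemon
  sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-0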
2026-04-01T02:24:11.234 INFO:tasks.ceph:mount /dev/vg_nvme/lv_1 on ubuntu@vm06.local -o noatime 2026-04-01T02:24:11.234 DEBUG:teuthology.orchestra.run.vm06:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_1 /var/lib/ceph/osd/ceph-4 2026-04-01T02:24:11.308 DEBUG:teuthology.orchestra.run.vm06:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-4 2026-04-01T02:24:11.377 DEBUG:teuthology.orchestra.run.vm06:> sudo mkdir -p /var/lib/ceph/osd/ceph-5 2026-04-01T02:24:11.446 INFO:tasks.ceph:roles_to_devs: {'osd.4': '/dev/vg_nvme/lv_1', 'osd.5': '/dev/vg_nvme/lv_2', 'osd.6': '/dev/vg_nvme/lv_3', 'osd.7': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:11.446 INFO:tasks.ceph:role: osd.5 2026-04-01T02:24:11.446 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_2 on ubuntu@vm06.local 2026-04-01T02:24:11.446 DEBUG:teuthology.orchestra.run.vm06:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_2 2026-04-01T02:24:11.512 INFO:teuthology.orchestra.run.vm06.stdout:meta-data=/dev/vg_nvme/lv_2 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:11.512 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:11.512 INFO:teuthology.orchestra.run.vm06.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:11.513 INFO:teuthology.orchestra.run.vm06.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:11.513 INFO:teuthology.orchestra.run.vm06.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:11.513 INFO:teuthology.orchestra.run.vm06.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:11.513 INFO:teuthology.orchestra.run.vm06.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:11.513 INFO:teuthology.orchestra.run.vm06.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:11.513 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:11.513 INFO:teuthology.orchestra.run.vm06.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:11.517 INFO:teuthology.orchestra.run.vm06.stdout:Discarding blocks...Done. 
2026-04-01T02:24:11.519 INFO:tasks.ceph:mount /dev/vg_nvme/lv_2 on ubuntu@vm06.local -o noatime 2026-04-01T02:24:11.519 DEBUG:teuthology.orchestra.run.vm06:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_2 /var/lib/ceph/osd/ceph-5 2026-04-01T02:24:11.595 DEBUG:teuthology.orchestra.run.vm06:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-5 2026-04-01T02:24:11.665 DEBUG:teuthology.orchestra.run.vm06:> sudo mkdir -p /var/lib/ceph/osd/ceph-6 2026-04-01T02:24:11.731 INFO:tasks.ceph:roles_to_devs: {'osd.4': '/dev/vg_nvme/lv_1', 'osd.5': '/dev/vg_nvme/lv_2', 'osd.6': '/dev/vg_nvme/lv_3', 'osd.7': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:11.731 INFO:tasks.ceph:role: osd.6 2026-04-01T02:24:11.731 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_3 on ubuntu@vm06.local 2026-04-01T02:24:11.731 DEBUG:teuthology.orchestra.run.vm06:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_3 2026-04-01T02:24:11.796 INFO:teuthology.orchestra.run.vm06.stdout:meta-data=/dev/vg_nvme/lv_3 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:11.797 INFO:teuthology.orchestra.run.vm06.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:11.802 INFO:teuthology.orchestra.run.vm06.stdout:Discarding blocks...Done. 
2026-04-01T02:24:11.804 INFO:tasks.ceph:mount /dev/vg_nvme/lv_3 on ubuntu@vm06.local -o noatime 2026-04-01T02:24:11.804 DEBUG:teuthology.orchestra.run.vm06:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_3 /var/lib/ceph/osd/ceph-6 2026-04-01T02:24:11.875 DEBUG:teuthology.orchestra.run.vm06:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-6 2026-04-01T02:24:11.949 DEBUG:teuthology.orchestra.run.vm06:> sudo mkdir -p /var/lib/ceph/osd/ceph-7 2026-04-01T02:24:12.018 INFO:tasks.ceph:roles_to_devs: {'osd.4': '/dev/vg_nvme/lv_1', 'osd.5': '/dev/vg_nvme/lv_2', 'osd.6': '/dev/vg_nvme/lv_3', 'osd.7': '/dev/vg_nvme/lv_4'} 2026-04-01T02:24:12.018 INFO:tasks.ceph:role: osd.7 2026-04-01T02:24:12.018 INFO:tasks.ceph:['mkfs.xfs', '-f', '-i', 'size=2048'] on /dev/vg_nvme/lv_4 on ubuntu@vm06.local 2026-04-01T02:24:12.019 DEBUG:teuthology.orchestra.run.vm06:> yes | sudo mkfs.xfs -f -i size=2048 /dev/vg_nvme/lv_4 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout:meta-data=/dev/vg_nvme/lv_4 isize=2048 agcount=4, agsize=1310464 blks 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 attr=2, projid32bit=1 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout: = crc=1 finobt=1, sparse=1, rmapbt=0 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout: = reflink=1 bigtime=1 inobtcount=1 nrext64=0 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout:data = bsize=4096 blocks=5241856, imaxpct=25 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout: = sunit=0 swidth=0 blks 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout:naming =version 2 bsize=4096 ascii-ci=0, ftype=1 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout:log =internal log bsize=4096 blocks=16384, version=2 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout: = sectsz=512 sunit=0 blks, lazy-count=1 2026-04-01T02:24:12.085 INFO:teuthology.orchestra.run.vm06.stdout:realtime =none extsz=4096 blocks=0, rtextents=0 2026-04-01T02:24:12.089 INFO:teuthology.orchestra.run.vm06.stdout:Discarding blocks...Done. 
2026-04-01T02:24:12.091 INFO:tasks.ceph:mount /dev/vg_nvme/lv_4 on ubuntu@vm06.local -o noatime 2026-04-01T02:24:12.091 DEBUG:teuthology.orchestra.run.vm06:> sudo mount -t xfs -o noatime /dev/vg_nvme/lv_4 /var/lib/ceph/osd/ceph-7 2026-04-01T02:24:12.162 DEBUG:teuthology.orchestra.run.vm06:> sudo /sbin/restorecon /var/lib/ceph/osd/ceph-7 2026-04-01T02:24:12.234 DEBUG:teuthology.orchestra.run.vm06:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 4 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:12.323 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:12.320+0000 7fc6be3c6900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-4/keyring: can't open /var/lib/ceph/osd/ceph-4/keyring: (2) No such file or directory 2026-04-01T02:24:12.323 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:12.320+0000 7fc6be3c6900 -1 created new key in keyring /var/lib/ceph/osd/ceph-4/keyring 2026-04-01T02:24:12.323 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:12.321+0000 7fc6be3c6900 -1 bdev(0x5575ff483800 /var/lib/ceph/osd/ceph-4/block) open stat got: (1) Operation not permitted 2026-04-01T02:24:12.323 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:12.321+0000 7fc6be3c6900 -1 bluestore(/var/lib/ceph/osd/ceph-4) _read_fsid unparsable uuid 2026-04-01T02:24:12.995 DEBUG:teuthology.orchestra.run.vm06:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-4 2026-04-01T02:24:13.023 DEBUG:teuthology.orchestra.run.vm06:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 5 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:13.106 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.103+0000 7f6e0d189900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-5/keyring: can't open /var/lib/ceph/osd/ceph-5/keyring: (2) No such file or directory 2026-04-01T02:24:13.106 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.103+0000 7f6e0d189900 -1 created new key in keyring /var/lib/ceph/osd/ceph-5/keyring 2026-04-01T02:24:13.106 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.103+0000 7f6e0d189900 -1 bdev(0x5627f390d800 /var/lib/ceph/osd/ceph-5/block) open stat got: (1) Operation not permitted 2026-04-01T02:24:13.106 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.103+0000 7f6e0d189900 -1 bluestore(/var/lib/ceph/osd/ceph-5) _read_fsid unparsable uuid 2026-04-01T02:24:13.758 DEBUG:teuthology.orchestra.run.vm06:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-5 2026-04-01T02:24:13.825 DEBUG:teuthology.orchestra.run.vm06:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 6 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:13.910 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.908+0000 7fe6af578900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-6/keyring: can't open /var/lib/ceph/osd/ceph-6/keyring: (2) No such file or directory 2026-04-01T02:24:13.911 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.908+0000 7fe6af578900 -1 created new key in keyring /var/lib/ceph/osd/ceph-6/keyring 2026-04-01T02:24:13.911 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.908+0000 7fe6af578900 -1 bdev(0x5648f8095800 /var/lib/ceph/osd/ceph-6/block) open stat got: (1) Operation not permitted 
2026-04-01T02:24:13.911 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:13.908+0000 7fe6af578900 -1 bluestore(/var/lib/ceph/osd/ceph-6) _read_fsid unparsable uuid 2026-04-01T02:24:14.618 DEBUG:teuthology.orchestra.run.vm06:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-6 2026-04-01T02:24:14.647 DEBUG:teuthology.orchestra.run.vm06:> sudo MALLOC_CHECK_=3 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-osd --no-mon-config --cluster ceph --mkfs --mkkey -i 7 --monmap /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:14.744 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:14.741+0000 7f2e6341f900 -1 auth: error reading file: /var/lib/ceph/osd/ceph-7/keyring: can't open /var/lib/ceph/osd/ceph-7/keyring: (2) No such file or directory 2026-04-01T02:24:14.744 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:14.742+0000 7f2e6341f900 -1 created new key in keyring /var/lib/ceph/osd/ceph-7/keyring 2026-04-01T02:24:14.744 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:14.742+0000 7f2e6341f900 -1 bdev(0x561819e2b800 /var/lib/ceph/osd/ceph-7/block) open stat got: (1) Operation not permitted 2026-04-01T02:24:14.744 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:14.742+0000 7f2e6341f900 -1 bluestore(/var/lib/ceph/osd/ceph-7) _read_fsid unparsable uuid 2026-04-01T02:24:15.513 DEBUG:teuthology.orchestra.run.vm06:> sudo chown -R ceph:ceph /var/lib/ceph/osd/ceph-7 2026-04-01T02:24:15.538 INFO:tasks.ceph:Reading keys from all nodes... 2026-04-01T02:24:15.538 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:15.538 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/mgr/ceph-y/keyring of=/dev/stdout 2026-04-01T02:24:15.561 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:15.570 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-0/keyring of=/dev/stdout 2026-04-01T02:24:15.627 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:15.627 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-1/keyring of=/dev/stdout 2026-04-01T02:24:15.691 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:15.691 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-2/keyring of=/dev/stdout 2026-04-01T02:24:15.758 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:15.758 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-3/keyring of=/dev/stdout 2026-04-01T02:24:15.826 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:15.826 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/mgr/ceph-x/keyring of=/dev/stdout 2026-04-01T02:24:15.854 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:15.854 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-4/keyring of=/dev/stdout 2026-04-01T02:24:15.919 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:15.919 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-5/keyring of=/dev/stdout 2026-04-01T02:24:15.987 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:15.987 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-6/keyring of=/dev/stdout 2026-04-01T02:24:16.056 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:16.056 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-7/keyring of=/dev/stdout 2026-04-01T02:24:16.123 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:16.123 
DEBUG:teuthology.orchestra.run.vm03:> dd if=/etc/ceph/ceph.client.0.keyring of=/dev/stdout 2026-04-01T02:24:16.142 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:16.142 DEBUG:teuthology.orchestra.run.vm06:> dd if=/etc/ceph/ceph.client.1.keyring of=/dev/stdout 2026-04-01T02:24:16.181 DEBUG:teuthology.orchestra.run.vm08:> set -ex 2026-04-01T02:24:16.181 DEBUG:teuthology.orchestra.run.vm08:> dd if=/etc/ceph/ceph.client.2.keyring of=/dev/stdout 2026-04-01T02:24:16.200 INFO:tasks.ceph:Adding keys to all mons... 2026-04-01T02:24:16.200 DEBUG:teuthology.orchestra.run.vm03:> sudo tee -a /etc/ceph/ceph.keyring 2026-04-01T02:24:16.202 DEBUG:teuthology.orchestra.run.vm06:> sudo tee -a /etc/ceph/ceph.keyring 2026-04-01T02:24:16.226 INFO:teuthology.orchestra.run.vm03.stdout:[mgr.y] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDGgcxpO5wCJxAAL+jD/VeVlxMovOpecorXBw== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.0] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDIgcxphtMOAxAACDqdU1JZ+c9NWn4h8QuUGg== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.1] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDIgcxpnUUmNBAARamIxTvQ3d0OeO820vERmQ== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.2] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDJgcxpqxZWJxAAS0H+yexy8yciHBF2mjJJMg== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.3] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDKgcxpvEAnGxAAWxzgGIvtnxo8cIMDyYZUYw== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[mgr.x] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDGgcxpFhwdKhAAkDysmkA9CaoLWyS3dGjUMQ== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.4] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDMgcxp+MstExAAwohfhmhpBnoevk5ZTwIZQA== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.5] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDNgcxpcl46BhAA9B4doC4rromFqwiMK0ZkZA== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.6] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDNgcxpjcsyNhAAg5uPUxVfc58j8f1JdQG3rw== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[osd.7] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDOgcxpBMNFLBAAK56lvW0l51hQ9d65tbPAOw== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[client.0] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDGgcxpitrfLBAAoJIFrd8zjbL3h0eBrY7QhQ== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[client.1] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDGgcxpTtvlLxAAxEhLpAUqgqxXx2/wAzNOHA== 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout:[client.2] 2026-04-01T02:24:16.227 INFO:teuthology.orchestra.run.vm03.stdout: key = AQDGgcxpArDKMhAAKtsch8mTntAAtxI7ZSuXKQ== 2026-04-01T02:24:16.247 INFO:teuthology.orchestra.run.vm06.stdout:[mgr.y] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDGgcxpO5wCJxAAL+jD/VeVlxMovOpecorXBw== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.0] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDIgcxphtMOAxAACDqdU1JZ+c9NWn4h8QuUGg== 
2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.1] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDIgcxpnUUmNBAARamIxTvQ3d0OeO820vERmQ== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.2] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDJgcxpqxZWJxAAS0H+yexy8yciHBF2mjJJMg== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.3] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDKgcxpvEAnGxAAWxzgGIvtnxo8cIMDyYZUYw== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[mgr.x] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDGgcxpFhwdKhAAkDysmkA9CaoLWyS3dGjUMQ== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.4] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDMgcxp+MstExAAwohfhmhpBnoevk5ZTwIZQA== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.5] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDNgcxpcl46BhAA9B4doC4rromFqwiMK0ZkZA== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.6] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDNgcxpjcsyNhAAg5uPUxVfc58j8f1JdQG3rw== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[osd.7] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDOgcxpBMNFLBAAK56lvW0l51hQ9d65tbPAOw== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[client.0] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDGgcxpitrfLBAAoJIFrd8zjbL3h0eBrY7QhQ== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[client.1] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDGgcxpTtvlLxAAxEhLpAUqgqxXx2/wAzNOHA== 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout:[client.2] 2026-04-01T02:24:16.248 INFO:teuthology.orchestra.run.vm06.stdout: key = AQDGgcxpArDKMhAAKtsch8mTntAAtxI7ZSuXKQ== 2026-04-01T02:24:16.249 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=mgr.y --cap mon 'allow profile mgr' --cap osd 'allow *' --cap mds 'allow *' 2026-04-01T02:24:16.269 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=mgr.y --cap mon 'allow profile mgr' --cap osd 'allow *' --cap mds 'allow *' 2026-04-01T02:24:16.338 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.0 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.340 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.0 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.388 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.1 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.389 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring 
--name=osd.1 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.444 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.2 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.445 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.2 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.493 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.3 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.494 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.3 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.544 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=mgr.x --cap mon 'allow profile mgr' --cap osd 'allow *' --cap mds 'allow *' 2026-04-01T02:24:16.545 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=mgr.x --cap mon 'allow profile mgr' --cap osd 'allow *' --cap mds 'allow *' 2026-04-01T02:24:16.594 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.4 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.595 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.4 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.653 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.5 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.654 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.5 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.707 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.6 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.708 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.6 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.758 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.7 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.760 
DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=osd.7 --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *' 2026-04-01T02:24:16.813 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=client.0 --cap mon 'allow rw' --cap mgr 'allow r' --cap osd 'allow rwx' --cap mds allow 2026-04-01T02:24:16.815 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=client.0 --cap mon 'allow rw' --cap mgr 'allow r' --cap osd 'allow rwx' --cap mds allow 2026-04-01T02:24:16.866 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=client.1 --cap mon 'allow rw' --cap mgr 'allow r' --cap osd 'allow rwx' --cap mds allow 2026-04-01T02:24:16.868 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=client.1 --cap mon 'allow rw' --cap mgr 'allow r' --cap osd 'allow rwx' --cap mds allow 2026-04-01T02:24:16.918 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=client.2 --cap mon 'allow rw' --cap mgr 'allow r' --cap osd 'allow rwx' --cap mds allow 2026-04-01T02:24:16.920 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-authtool /etc/ceph/ceph.keyring --name=client.2 --cap mon 'allow rw' --cap mgr 'allow r' --cap osd 'allow rwx' --cap mds allow 2026-04-01T02:24:16.975 INFO:tasks.ceph:Running mkfs on mon nodes... 
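With all daemon keyrings generated, tasks.ceph reads them back from every node, appends them to /etc/ceph/ceph.keyring on the mon hosts, and then stamps capabilities onto each entity with ceph-authtool, as logged above. A condensed sketch of that step, collapsing the per-host ssh hops into a single pipeline (cap strings copied from the log):

  # gather a generated key and append it to the shared keyring on a mon host
  sudo dd if=/var/lib/ceph/osd/ceph-0/keyring of=/dev/stdout | sudo tee -a /etc/ceph/ceph.keyring
  # grant the standard caps per entity type
  sudo ceph-authtool /etc/ceph/ceph.keyring --name=osd.0 \
      --cap mon 'allow profile osd' --cap mgr 'allow profile osd' --cap osd 'allow *'
  sudo ceph-authtool /etc/ceph/ceph.keyring --name=mgr.y \
      --cap mon 'allow profile mgr' --cap osd 'allow *' --cap mds 'allow *'
  sudo ceph-authtool /etc/ceph/ceph.keyring --name=client.0 \
      --cap mon 'allow rw' --cap mgr 'allow r' --cap osd 'allow rwx' --cap mds 'allow'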
2026-04-01T02:24:16.975 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /var/lib/ceph/mon/ceph-a 2026-04-01T02:24:17.004 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-mon --cluster ceph --mkfs -i a --monmap /home/ubuntu/cephtest/ceph.monmap --keyring /etc/ceph/ceph.keyring 2026-04-01T02:24:17.102 DEBUG:teuthology.orchestra.run.vm03:> sudo chown -R ceph:ceph /var/lib/ceph/mon/ceph-a 2026-04-01T02:24:17.129 DEBUG:teuthology.orchestra.run.vm03:> sudo mkdir -p /var/lib/ceph/mon/ceph-c 2026-04-01T02:24:17.201 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-mon --cluster ceph --mkfs -i c --monmap /home/ubuntu/cephtest/ceph.monmap --keyring /etc/ceph/ceph.keyring 2026-04-01T02:24:17.303 DEBUG:teuthology.orchestra.run.vm03:> sudo chown -R ceph:ceph /var/lib/ceph/mon/ceph-c 2026-04-01T02:24:17.332 DEBUG:teuthology.orchestra.run.vm06:> sudo mkdir -p /var/lib/ceph/mon/ceph-b 2026-04-01T02:24:17.359 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph-mon --cluster ceph --mkfs -i b --monmap /home/ubuntu/cephtest/ceph.monmap --keyring /etc/ceph/ceph.keyring 2026-04-01T02:24:17.456 DEBUG:teuthology.orchestra.run.vm06:> sudo chown -R ceph:ceph /var/lib/ceph/mon/ceph-b 2026-04-01T02:24:17.486 DEBUG:teuthology.orchestra.run.vm03:> rm -- /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:17.487 DEBUG:teuthology.orchestra.run.vm06:> rm -- /home/ubuntu/cephtest/ceph.monmap 2026-04-01T02:24:17.544 INFO:tasks.ceph:Starting mon daemons in cluster ceph... 2026-04-01T02:24:17.544 INFO:tasks.ceph.mon.a:Restarting daemon 2026-04-01T02:24:17.544 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mon -f --cluster ceph -i a 2026-04-01T02:24:17.546 INFO:tasks.ceph.mon.a:Started 2026-04-01T02:24:17.546 INFO:tasks.ceph.mon.c:Restarting daemon 2026-04-01T02:24:17.546 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mon -f --cluster ceph -i c 2026-04-01T02:24:17.547 INFO:tasks.ceph.mon.c:Started 2026-04-01T02:24:17.547 INFO:tasks.ceph.mon.b:Restarting daemon 2026-04-01T02:24:17.547 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mon -f --cluster ceph -i b 2026-04-01T02:24:17.586 INFO:tasks.ceph.mon.b:Started 2026-04-01T02:24:17.586 INFO:tasks.ceph:Starting mgr daemons in cluster ceph... 
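The mon stores above are created from the shared keyring plus the previously generated monmap, and each mon is then started in the foreground under teuthology's daemon-helper so the task can signal it later. A sketch of the bring-up for mon.a, without the coverage wrappers:

  # create the mon store from the monmap and shared keyring, then hand it to the ceph user
  sudo mkdir -p /var/lib/ceph/mon/ceph-a
  sudo ceph-mon --cluster ceph --mkfs -i a \
      --monmap /home/ubuntu/cephtest/ceph.monmap --keyring /etc/ceph/ceph.keyring
  sudo chown -R ceph:ceph /var/lib/ceph/mon/ceph-a
  # run in the foreground, as daemon-helper does in the log
  sudo ceph-mon -f --cluster ceph -i a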
2026-04-01T02:24:17.587 INFO:tasks.ceph.mgr.y:Restarting daemon 2026-04-01T02:24:17.587 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mgr -f --cluster ceph -i y 2026-04-01T02:24:17.588 INFO:tasks.ceph.mgr.y:Started 2026-04-01T02:24:17.588 INFO:tasks.ceph.mgr.x:Restarting daemon 2026-04-01T02:24:17.588 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mgr -f --cluster ceph -i x 2026-04-01T02:24:17.590 INFO:tasks.ceph.mgr.x:Started 2026-04-01T02:24:17.590 DEBUG:tasks.ceph:set 0 configs 2026-04-01T02:24:17.590 DEBUG:teuthology.orchestra.run.vm03:> sudo ceph --cluster ceph config dump 2026-04-01T02:24:23.558 INFO:tasks.ceph.mgr.x.vm06.stderr:/usr/lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode. 2026-04-01T02:24:23.558 INFO:tasks.ceph.mgr.x.vm06.stderr:Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve. 2026-04-01T02:24:23.558 INFO:tasks.ceph.mgr.x.vm06.stderr: from numpy import show_config as show_numpy_config 2026-04-01T02:24:23.593 INFO:tasks.ceph.mgr.y.vm03.stderr:/usr/lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode. 2026-04-01T02:24:23.593 INFO:tasks.ceph.mgr.y.vm03.stderr:Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve. 2026-04-01T02:24:23.593 INFO:tasks.ceph.mgr.y.vm03.stderr: from numpy import show_config as show_numpy_config 2026-04-01T02:24:23.726 INFO:teuthology.orchestra.run.vm03.stdout:WHO MASK LEVEL OPTION VALUE RO 2026-04-01T02:24:23.737 INFO:tasks.ceph:Setting crush tunables to default 2026-04-01T02:24:23.737 DEBUG:teuthology.orchestra.run.vm03:> sudo ceph --cluster ceph osd crush tunables default 2026-04-01T02:24:23.868 INFO:teuthology.orchestra.run.vm03.stderr:adjusted tunables profile to default 2026-04-01T02:24:23.881 INFO:tasks.ceph:check_enable_crimson: False 2026-04-01T02:24:23.881 INFO:tasks.ceph:Starting osd daemons in cluster ceph... 
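The OSD bring-up that follows reads each store's fsid, registers it with the monitors via `ceph osd new`, and only then starts the daemons; the task subsequently polls `ceph osd dump --format=json` until all eight OSDs report up (the dump captured below still lists them as "exists,new" with 0 of 8 up). A hedged sketch of that registration loop, collapsed onto a single node (in the log the fsid reads and daemon starts happen on the OSD hosts while `ceph osd new` is issued from vm06):

  # register each OSD's fsid with the cluster before starting it (sketch)
  for id in 0 1 2 3 4 5 6 7; do
      uuid=$(sudo cat /var/lib/ceph/osd/ceph-$id/fsid)
      sudo ceph --cluster ceph osd new "$uuid" "$id"
  done
  # then start each OSD in the foreground, as daemon-helper does in the log
  sudo ceph-osd -f --cluster ceph -i 0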
2026-04-01T02:24:23.881 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:23.881 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-0/fsid of=/dev/stdout 2026-04-01T02:24:23.920 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:23.920 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-1/fsid of=/dev/stdout 2026-04-01T02:24:23.992 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:23.993 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-2/fsid of=/dev/stdout 2026-04-01T02:24:24.065 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:24:24.065 DEBUG:teuthology.orchestra.run.vm03:> sudo dd if=/var/lib/ceph/osd/ceph-3/fsid of=/dev/stdout 2026-04-01T02:24:24.133 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:24.133 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-4/fsid of=/dev/stdout 2026-04-01T02:24:24.160 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:24.160 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-5/fsid of=/dev/stdout 2026-04-01T02:24:24.227 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:24.227 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-6/fsid of=/dev/stdout 2026-04-01T02:24:24.293 DEBUG:teuthology.orchestra.run.vm06:> set -ex 2026-04-01T02:24:24.293 DEBUG:teuthology.orchestra.run.vm06:> sudo dd if=/var/lib/ceph/osd/ceph-7/fsid of=/dev/stdout 2026-04-01T02:24:24.358 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new 89e72097-68d3-4917-9d63-d02f1bf68f31 0 2026-04-01T02:24:24.516 INFO:teuthology.orchestra.run.vm06.stdout:0 2026-04-01T02:24:24.526 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new c09bed04-11ca-413a-b62a-7dbf885b020a 1 2026-04-01T02:24:24.646 INFO:teuthology.orchestra.run.vm06.stdout:1 2026-04-01T02:24:24.657 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new 0e3dcff8-36fd-48b2-aced-989316025420 2 2026-04-01T02:24:24.774 INFO:teuthology.orchestra.run.vm06.stdout:2 2026-04-01T02:24:24.785 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new e32456e4-6faf-4dc1-822f-ef8a2ea3d6c2 3 2026-04-01T02:24:24.906 INFO:teuthology.orchestra.run.vm06.stdout:3 2026-04-01T02:24:24.915 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new f0f1f55b-af45-4403-bd31-41859f048b53 4 2026-04-01T02:24:25.032 INFO:teuthology.orchestra.run.vm06.stdout:4 2026-04-01T02:24:25.042 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new d88b8f6d-010d-4f77-8ded-f8a9fd351fe7 5 2026-04-01T02:24:25.164 INFO:teuthology.orchestra.run.vm06.stdout:5 2026-04-01T02:24:25.175 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new 1c881e96-364e-4f1e-a73e-9b75d1db46dd 6 2026-04-01T02:24:25.299 INFO:teuthology.orchestra.run.vm06.stdout:6 2026-04-01T02:24:25.309 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph --cluster ceph osd new 6c76eb32-2791-48e0-a9ca-205cacde72bc 7 2026-04-01T02:24:25.433 INFO:teuthology.orchestra.run.vm06.stdout:7 2026-04-01T02:24:25.441 INFO:tasks.ceph.osd.0:Restarting daemon 2026-04-01T02:24:25.441 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 0 2026-04-01T02:24:25.442 INFO:tasks.ceph.osd.0:Started 2026-04-01T02:24:25.442 INFO:tasks.ceph.osd.1:Restarting daemon 2026-04-01T02:24:25.442 DEBUG:teuthology.orchestra.run.vm03:> sudo 
adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 1 2026-04-01T02:24:25.443 INFO:tasks.ceph.osd.1:Started 2026-04-01T02:24:25.443 INFO:tasks.ceph.osd.2:Restarting daemon 2026-04-01T02:24:25.444 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 2 2026-04-01T02:24:25.445 INFO:tasks.ceph.osd.2:Started 2026-04-01T02:24:25.445 INFO:tasks.ceph.osd.3:Restarting daemon 2026-04-01T02:24:25.445 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3 2026-04-01T02:24:25.447 INFO:tasks.ceph.osd.3:Started 2026-04-01T02:24:25.447 INFO:tasks.ceph.osd.4:Restarting daemon 2026-04-01T02:24:25.447 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 4 2026-04-01T02:24:25.448 INFO:tasks.ceph.osd.4:Started 2026-04-01T02:24:25.448 INFO:tasks.ceph.osd.5:Restarting daemon 2026-04-01T02:24:25.448 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 5 2026-04-01T02:24:25.449 INFO:tasks.ceph.osd.5:Started 2026-04-01T02:24:25.449 INFO:tasks.ceph.osd.6:Restarting daemon 2026-04-01T02:24:25.450 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 6 2026-04-01T02:24:25.451 INFO:tasks.ceph.osd.6:Started 2026-04-01T02:24:25.451 INFO:tasks.ceph.osd.7:Restarting daemon 2026-04-01T02:24:25.451 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 7 2026-04-01T02:24:25.453 INFO:tasks.ceph.osd.7:Started 2026-04-01T02:24:25.453 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd dump --format=json 2026-04-01T02:24:25.610 INFO:tasks.ceph.osd.3.vm03.stderr:2026-04-01T02:24:25.607+0000 7f360d95e900 -1 Falling back to public interface 2026-04-01T02:24:25.613 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:25.613 INFO:teuthology.orchestra.run.vm03.stdout:{"epoch":10,"fsid":"1338c6ab-9330-4cd3-91bf-71d5668f30ea","created":"2026-04-01T02:24:22.859889+0000","modified":"2026-04-01T02:24:25.426214+0000","last_up_change":"0.000000","last_in_change":"2026-04-01T02:24:25.426214+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":2,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":0,"max_osd":8,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[],"osds":[{"osd":0,"uuid":"89e72097-68d3-4917-9d63-d02f1bf68f31","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 
0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":1,"uuid":"c09bed04-11ca-413a-b62a-7dbf885b020a","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":2,"uuid":"0e3dcff8-36fd-48b2-aced-989316025420","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":3,"uuid":"e32456e4-6faf-4dc1-822f-ef8a2ea3d6c2","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":4,"uuid":"f0f1f55b-af45-4403-bd31-41859f048b53","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":5,"uuid":"d88b8f6d-010d-4f77-8ded-f8a9fd351fe7","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":6,"uuid":"1c881e96-364e-4f1e-a73e-9b75d1db46dd","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 
0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":7,"uuid":"6c76eb32-2791-48e0-a9ca-205cacde72bc","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":3,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":4,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":5,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":6,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":7,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[],"new_removed_snaps":[],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-04-01T02:24:25.615 INFO:tasks.ceph.osd.1.vm03.stderr:2026-04-01T02:24:25.612+0000 7fa3465b9900 -1 Falling back to public interface 2026-04-01T02:24:25.620 INFO:tasks.ceph.osd.6.vm06.stderr:2026-04-01T02:24:25.617+0000 7f6af9363900 -1 Falling back to public interface 2026-04-01T02:24:25.625 INFO:tasks.ceph.osd.2.vm03.stderr:2026-04-01T02:24:25.621+0000 7f6c921ce900 -1 Falling back to public interface 2026-04-01T02:24:25.625 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:24:25.622+0000 7fd1640e4900 -1 Falling back to public interface 2026-04-01T02:24:25.625 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:24:25.623+0000 7fd163089640 -1 PosixStack listen unable to listen on v2:0.0.0.0:6822/0: (98) Address already in use 2026-04-01T02:24:25.627 INFO:tasks.ceph.ceph_manager.ceph:[] 2026-04-01T02:24:25.627 INFO:tasks.ceph:Waiting for OSDs to come up 2026-04-01T02:24:25.637 INFO:tasks.ceph.osd.7.vm06.stderr:2026-04-01T02:24:25.634+0000 7f162cecd900 -1 Falling back to public interface 2026-04-01T02:24:25.642 INFO:tasks.ceph.osd.5.vm06.stderr:2026-04-01T02:24:25.639+0000 7f515c294900 -1 Falling back to 
public interface 2026-04-01T02:24:25.642 INFO:tasks.ceph.osd.4.vm06.stderr:2026-04-01T02:24:25.639+0000 7f899a896900 -1 Falling back to public interface 2026-04-01T02:24:26.002 INFO:tasks.ceph.osd.6.vm06.stderr:2026-04-01T02:24:25.999+0000 7f6af9363900 -1 osd.6 0 log_to_monitors true 2026-04-01T02:24:26.009 INFO:tasks.ceph.osd.1.vm03.stderr:2026-04-01T02:24:26.006+0000 7fa3465b9900 -1 osd.1 0 log_to_monitors true 2026-04-01T02:24:26.014 INFO:tasks.ceph.osd.3.vm03.stderr:2026-04-01T02:24:26.011+0000 7f360d95e900 -1 osd.3 0 log_to_monitors true 2026-04-01T02:24:26.057 INFO:tasks.ceph.osd.7.vm06.stderr:2026-04-01T02:24:26.054+0000 7f162cecd900 -1 osd.7 0 log_to_monitors true 2026-04-01T02:24:26.064 INFO:tasks.ceph.osd.4.vm06.stderr:2026-04-01T02:24:26.061+0000 7f899a896900 -1 osd.4 0 log_to_monitors true 2026-04-01T02:24:26.076 INFO:tasks.ceph.osd.2.vm03.stderr:2026-04-01T02:24:26.073+0000 7f6c921ce900 -1 osd.2 0 log_to_monitors true 2026-04-01T02:24:26.077 INFO:tasks.ceph.osd.5.vm06.stderr:2026-04-01T02:24:26.074+0000 7f515c294900 -1 osd.5 0 log_to_monitors true 2026-04-01T02:24:26.088 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:24:26.085+0000 7fd1640e4900 -1 osd.0 0 log_to_monitors true 2026-04-01T02:24:26.256 INFO:tasks.ceph.mgr.x.vm06.stderr:2026-04-01T02:24:26.253+0000 7fc5134f1640 -1 mgr.server handle_report got status from non-daemon mon.c 2026-04-01T02:24:26.256 INFO:tasks.ceph.mgr.x.vm06.stderr:2026-04-01T02:24:26.253+0000 7fc5134f1640 -1 mgr.server handle_report got status from non-daemon mon.b 2026-04-01T02:24:26.432 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph --cluster ceph osd dump --format=json 2026-04-01T02:24:26.629 INFO:teuthology.misc.health.vm03.stdout: 2026-04-01T02:24:26.629 INFO:teuthology.misc.health.vm03.stdout:{"epoch":10,"fsid":"1338c6ab-9330-4cd3-91bf-71d5668f30ea","created":"2026-04-01T02:24:22.859889+0000","modified":"2026-04-01T02:24:25.426214+0000","last_up_change":"0.000000","last_in_change":"2026-04-01T02:24:25.426214+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":2,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":0,"max_osd":8,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[],"osds":[{"osd":0,"uuid":"89e72097-68d3-4917-9d63-d02f1bf68f31","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":1,"uuid":"c09bed04-11ca-413a-b62a-7dbf885b020a","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 
0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":2,"uuid":"0e3dcff8-36fd-48b2-aced-989316025420","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":3,"uuid":"e32456e4-6faf-4dc1-822f-ef8a2ea3d6c2","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":4,"uuid":"f0f1f55b-af45-4403-bd31-41859f048b53","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":5,"uuid":"d88b8f6d-010d-4f77-8ded-f8a9fd351fe7","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":6,"uuid":"1c881e96-364e-4f1e-a73e-9b75d1db46dd","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 0)/0","state":["exists","new"]},{"osd":7,"uuid":"6c76eb32-2791-48e0-a9ca-205cacde72bc","up":0,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":0,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[]},"cluster_addrs":{"addrvec":[]},"heartbeat_back_addrs":{"addrvec":[]},"heartbeat_front_addrs":{"addrvec":[]},"public_addr":"(unrecognized address family 0)/0","cluster_addr":"(unrecognized address family 0)/0","heartbeat_back_addr":"(unrecognized address family 0)/0","heartbeat_front_addr":"(unrecognized address family 
0)/0","state":["exists","new"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":3,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":4,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":5,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":6,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":7,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":0,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[],"new_removed_snaps":[],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-04-01T02:24:26.639 DEBUG:teuthology.misc:0 of 8 OSDs are up 2026-04-01T02:24:32.278 INFO:tasks.ceph.osd.3.vm03.stderr:2026-04-01T02:24:32.275+0000 7f36098ec640 -1 osd.3 0 waiting for initial osdmap 2026-04-01T02:24:32.293 INFO:tasks.ceph.osd.3.vm03.stderr:2026-04-01T02:24:32.290+0000 7f36046f0640 -1 osd.3 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-04-01T02:24:32.357 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:24:32.354+0000 7fd160072640 -1 osd.0 0 waiting for initial osdmap 2026-04-01T02:24:32.381 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:24:32.378+0000 7fd15ae76640 -1 osd.0 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-04-01T02:24:32.382 INFO:tasks.ceph.osd.1.vm03.stderr:2026-04-01T02:24:32.379+0000 7fa342547640 -1 osd.1 0 waiting for initial osdmap 2026-04-01T02:24:32.394 INFO:tasks.ceph.osd.1.vm03.stderr:2026-04-01T02:24:32.391+0000 7fa33d34b640 -1 osd.1 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-04-01T02:24:32.419 INFO:tasks.ceph.osd.7.vm06.stderr:2026-04-01T02:24:32.416+0000 7f1628e5b640 -1 osd.7 0 waiting for initial osdmap 2026-04-01T02:24:32.438 INFO:tasks.ceph.osd.7.vm06.stderr:2026-04-01T02:24:32.435+0000 7f1623c5f640 -1 osd.7 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-04-01T02:24:32.468 INFO:tasks.ceph.osd.4.vm06.stderr:2026-04-01T02:24:32.465+0000 7f8996826640 -1 osd.4 0 waiting for initial osdmap 2026-04-01T02:24:32.486 INFO:tasks.ceph.osd.4.vm06.stderr:2026-04-01T02:24:32.484+0000 7f899162a640 -1 osd.4 12 set_numa_affinity unable to identify public 
interface '' numa node: (2) No such file or directory 2026-04-01T02:24:32.512 INFO:tasks.ceph.osd.5.vm06.stderr:2026-04-01T02:24:32.509+0000 7f5158222640 -1 osd.5 0 waiting for initial osdmap 2026-04-01T02:24:32.522 INFO:tasks.ceph.osd.2.vm03.stderr:2026-04-01T02:24:32.519+0000 7f6c8e15e640 -1 osd.2 0 waiting for initial osdmap 2026-04-01T02:24:32.526 INFO:tasks.ceph.osd.5.vm06.stderr:2026-04-01T02:24:32.523+0000 7f5153026640 -1 osd.5 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-04-01T02:24:32.539 INFO:tasks.ceph.osd.2.vm03.stderr:2026-04-01T02:24:32.536+0000 7f6c88f62640 -1 osd.2 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-04-01T02:24:32.580 INFO:tasks.ceph.osd.6.vm06.stderr:2026-04-01T02:24:32.578+0000 7f6af5b05640 -1 osd.6 0 waiting for initial osdmap 2026-04-01T02:24:32.593 INFO:tasks.ceph.osd.6.vm06.stderr:2026-04-01T02:24:32.591+0000 7f6af00f7640 -1 osd.6 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-04-01T02:24:33.442 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage ceph --cluster ceph osd dump --format=json 2026-04-01T02:24:33.656 INFO:teuthology.misc.health.vm03.stdout: 2026-04-01T02:24:33.656 INFO:teuthology.misc.health.vm03.stdout:{"epoch":13,"fsid":"1338c6ab-9330-4cd3-91bf-71d5668f30ea","created":"2026-04-01T02:24:22.859889+0000","modified":"2026-04-01T02:24:33.279665+0000","last_up_change":"2026-04-01T02:24:33.279665+0000","last_in_change":"2026-04-01T02:24:25.426214+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":4,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":0,"max_osd":8,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[],"osds":[{"osd":0,"uuid":"89e72097-68d3-4917-9d63-d02f1bf68f31","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6823","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6825","nonce":1019300300}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6826","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6827","nonce":1019300300}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6830","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6831","nonce":1019300300}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6828","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6829","nonce":1019300300}]},"public_addr":"192.168.123.103:6825/1019300300","cluster_addr":"192.168.123.103:6827/1019300300","heartbeat_back_addr":"192.168.123.103:6831/1019300300","heartbeat_front_addr":"192.168.123.103:6829/1019300300","state":["exists","up"]},{"osd":1,"uuid":"c09bed04-11ca-413a-b62a-7dbf885b020a","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6808","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6809","nonce":752174647}]},"cluster_
addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6810","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6811","nonce":752174647}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6814","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6815","nonce":752174647}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6812","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6813","nonce":752174647}]},"public_addr":"192.168.123.103:6809/752174647","cluster_addr":"192.168.123.103:6811/752174647","heartbeat_back_addr":"192.168.123.103:6815/752174647","heartbeat_front_addr":"192.168.123.103:6813/752174647","state":["exists","up"]},{"osd":2,"uuid":"0e3dcff8-36fd-48b2-aced-989316025420","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6816","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6817","nonce":58483728}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6818","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6819","nonce":58483728}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6822","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6824","nonce":58483728}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6820","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6821","nonce":58483728}]},"public_addr":"192.168.123.103:6817/58483728","cluster_addr":"192.168.123.103:6819/58483728","heartbeat_back_addr":"192.168.123.103:6824/58483728","heartbeat_front_addr":"192.168.123.103:6821/58483728","state":["exists","up"]},{"osd":3,"uuid":"e32456e4-6faf-4dc1-822f-ef8a2ea3d6c2","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6800","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6801","nonce":3056993723}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6802","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6803","nonce":3056993723}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6806","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6807","nonce":3056993723}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6804","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6805","nonce":3056993723}]},"public_addr":"192.168.123.103:6801/3056993723","cluster_addr":"192.168.123.103:6803/3056993723","heartbeat_back_addr":"192.168.123.103:6807/3056993723","heartbeat_front_addr":"192.168.123.103:6805/3056993723","state":["exists","up"]},{"osd":4,"uuid":"f0f1f55b-af45-4403-bd31-41859f048b53","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6819","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6821","nonce":3132188684}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6823","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6825","nonce":3132188684}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6831","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6833","nonce":3132188684}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6827","nonce":313
2188684},{"type":"v1","addr":"192.168.123.106:6829","nonce":3132188684}]},"public_addr":"192.168.123.106:6821/3132188684","cluster_addr":"192.168.123.106:6825/3132188684","heartbeat_back_addr":"192.168.123.106:6833/3132188684","heartbeat_front_addr":"192.168.123.106:6829/3132188684","state":["exists","up"]},{"osd":5,"uuid":"d88b8f6d-010d-4f77-8ded-f8a9fd351fe7","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6818","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6820","nonce":2357546250}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6822","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6824","nonce":2357546250}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6830","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6832","nonce":2357546250}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6826","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6828","nonce":2357546250}]},"public_addr":"192.168.123.106:6820/2357546250","cluster_addr":"192.168.123.106:6824/2357546250","heartbeat_back_addr":"192.168.123.106:6832/2357546250","heartbeat_front_addr":"192.168.123.106:6828/2357546250","state":["exists","up"]},{"osd":6,"uuid":"1c881e96-364e-4f1e-a73e-9b75d1db46dd","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6802","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6803","nonce":2938966295}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6804","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6805","nonce":2938966295}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6808","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6809","nonce":2938966295}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6806","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6807","nonce":2938966295}]},"public_addr":"192.168.123.106:6803/2938966295","cluster_addr":"192.168.123.106:6805/2938966295","heartbeat_back_addr":"192.168.123.106:6809/2938966295","heartbeat_front_addr":"192.168.123.106:6807/2938966295","state":["exists","up"]},{"osd":7,"uuid":"6c76eb32-2791-48e0-a9ca-205cacde72bc","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6810","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6811","nonce":4085424980}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6812","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6813","nonce":4085424980}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6816","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6817","nonce":4085424980}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6814","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6815","nonce":4085424980}]},"public_addr":"192.168.123.106:6811/4085424980","cluster_addr":"192.168.123.106:6813/4085424980","heartbeat_back_addr":"192.168.123.106:6817/4085424980","heartbeat_front_addr":"192.168.123.106:6815/4085424980","state":["exists","up"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_pro
bability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":3,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":4,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":5,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":6,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":7,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[],"new_removed_snaps":[],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-04-01T02:24:33.669 DEBUG:teuthology.misc:8 of 8 OSDs are up 2026-04-01T02:24:33.669 INFO:tasks.ceph:Creating RBD pool 2026-04-01T02:24:33.669 DEBUG:teuthology.orchestra.run.vm03:> sudo ceph --cluster ceph osd pool create rbd 8 2026-04-01T02:24:34.309 INFO:teuthology.orchestra.run.vm03.stderr:pool 'rbd' created 2026-04-01T02:24:34.322 DEBUG:teuthology.orchestra.run.vm03:> rbd --cluster ceph pool init rbd 2026-04-01T02:24:34.354 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:34.354 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:37.333 INFO:tasks.ceph:Starting mds daemons in cluster ceph... 2026-04-01T02:24:37.333 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph config log 1 --format=json 2026-04-01T02:24:37.334 INFO:tasks.daemonwatchdog.daemon_watchdog:watchdog starting 2026-04-01T02:24:37.564 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:37.579 INFO:teuthology.orchestra.run.vm03.stdout:[{"version":9,"timestamp":"2026-04-01T02:24:32.580809+0000","name":"","changes":[{"name":"osd.6/osd_mclock_max_capacity_iops_hdd","new_value":"4432.369481"}]}] 2026-04-01T02:24:37.579 INFO:tasks.ceph_manager:config epoch is 9 2026-04-01T02:24:37.579 INFO:tasks.ceph:Waiting until ceph daemons up and pgs clean... 
2026-04-01T02:24:37.579 INFO:tasks.ceph.ceph_manager.ceph:waiting for mgr available 2026-04-01T02:24:37.579 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph mgr dump --format=json 2026-04-01T02:24:37.805 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:37.816 INFO:teuthology.orchestra.run.vm03.stdout:{"epoch":5,"flags":0,"active_gid":4105,"active_name":"x","active_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6800","nonce":3897747400},{"type":"v1","addr":"192.168.123.106:6801","nonce":3897747400}]},"active_addr":"192.168.123.106:6801/3897747400","active_change":"2026-04-01T02:24:25.235921+0000","active_mgr_features":4541880224203014143,"available":true,"standbys":[{"gid":4104,"name":"y","mgr_features":4541880224203014143,"available_modules":[{"name":"alerts","can_run":true,"error_string":"","module_options":{"interval":{"name":"interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"How frequently to reexamine health status","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"smtp_destination":{"name":"smtp_destination","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Email address to send alerts to, use commas to separate multiple","long_desc":"","tags":[],"see_also":[]},"smtp_from_name":{"name":"smtp_from_name","type":"str","level":"advanced","flags":1,"default_value":"Ceph","min":"","max":"","enum_allowed":[],"desc":"Email From: name","long_desc":"","tags":[],"see_also":[]},"smtp_host":{"name":"smtp_host","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP server","long_desc":"","tags":[],"see_also":[]},"smtp_password":{"name":"smtp_password","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Password to authenticate with","long_desc":"","tags":[],"see_also":[]},"smtp_port":{"name":"smtp_port","type":"int","level":"advanced","flags":1,"default_value":"465","min":"","max":"","enum_allowed":[],"desc":"SMTP port","long_desc":"","tags":[],"see_also":[]},"smtp_sender":{"name":"smtp_sender","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP envelope sender","long_desc":"","tags":[],"see_also":[]},"smtp_ssl":{"name":"smtp_ssl","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Use SSL to connect to SMTP 
server","long_desc":"","tags":[],"see_also":[]},"smtp_user":{"name":"smtp_user","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"User to authenticate as","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"balancer","can_run":true,"error_string":"","module_options":{"active":{"name":"active","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically balance PGs across cluster","long_desc":"","tags":[],"see_also":[]},"begin_time":{"name":"begin_time","type":"str","level":"advanced","flags":1,"default_value":"0000","min":"","max":"","enum_allowed":[],"desc":"beginning time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"begin_weekday":{"name":"begin_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to this day of the week or later","long_desc":"0 = Sunday, 1 = Monday, etc.","tags":[],"see_also":[]},"crush_compat_max_iterations":{"name":"crush_compat_max_iterations","type":"uint","level":"advanced","flags":1,"default_value":"25","min":"1","max":"250","enum_allowed":[],"desc":"maximum number of iterations to attempt optimization","long_desc":"","tags":[],"see_also":[]},"crush_compat_metrics":{"name":"crush_compat_metrics","type":"str","level":"advanced","flags":1,"default_value":"pgs,objects,bytes","min":"","max":"","enum_allowed":[],"desc":"metrics with which to calculate OSD utilization","long_desc":"Value is a list of one or more of \"pgs\", \"objects\", or \"bytes\", and indicates which metrics to use to balance utilization.","tags":[],"see_also":[]},"crush_compat_step":{"name":"crush_compat_step","type":"float","level":"advanced","flags":1,"default_value":"0.5","min":"0.001","max":"0.999","enum_allowed":[],"desc":"aggressiveness of optimization","long_desc":".99 is very aggressive, .01 is less aggressive","tags":[],"see_also":[]},"end_time":{"name":"end_time","type":"str","level":"advanced","flags":1,"default_value":"2359","min":"","max":"","enum_allowed":[],"desc":"ending time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"end_weekday":{"name":"end_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to days of the week earlier than this","long_desc":"0 = Sunday, 1 = Monday, 
etc.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_score":{"name":"min_score","type":"float","level":"advanced","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"minimum score, below which no optimization is attempted","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":1,"default_value":"upmap","min":"","max":"","enum_allowed":["crush-compat","none","read","upmap","upmap-read"],"desc":"Balancer mode","long_desc":"","tags":[],"see_also":[]},"pool_ids":{"name":"pool_ids","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"pools which the automatic balancing will be limited to","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and attempt optimization","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_pg_upmap_activity":{"name":"update_pg_upmap_activity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Updates pg_upmap activity stats to be used in `balancer status detail`","long_desc":"","tags":[],"see_also":[]},"upmap_max_deviation":{"name":"upmap_max_deviation","type":"int","level":"advanced","flags":1,"default_value":"5","min":"1","max":"","enum_allowed":[],"desc":"deviation below which no optimization is attempted","long_desc":"If the number of PGs are within this count then no optimization is attempted","tags":[],"see_also":[]},"upmap_max_optimizations":{"name":"upmap_max_optimizations","type":"uint","level":"advanced","flags":1,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"maximum upmap optimizations to make per attempt","long_desc":"","tags":[],"see_also":[]}}},{"name":"cephadm","can_run":true,"error_string":"","module_options":{"agent_down_multiplier":{"name":"agent_down_multiplier","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"","max":"","enum_allowed":[],"desc":"Multiplied by agent refresh rate to calculate how long agent must not report before being marked down","long_desc":"","tags":[],"see_also":[]},"agent_refresh_rate":{"name":"agent_refresh_rate","type":"secs","level":"advanced","flags":0,"default_value":"20","min":"","max":"","enum_allowed":[],"desc":"How often agent on each host will try to gather and send 
metadata","long_desc":"","tags":[],"see_also":[]},"agent_starting_port":{"name":"agent_starting_port","type":"int","level":"advanced","flags":0,"default_value":"4721","min":"","max":"","enum_allowed":[],"desc":"First port agent will try to bind to (will also try up to next 1000 subsequent ports if blocked)","long_desc":"","tags":[],"see_also":[]},"allow_ptrace":{"name":"allow_ptrace","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow SYS_PTRACE capability on ceph containers","long_desc":"The SYS_PTRACE capability is needed to attach to a process with gdb or strace. Enabling this options can allow debugging daemons that encounter problems at runtime.","tags":[],"see_also":[]},"autotune_interval":{"name":"autotune_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to autotune daemon memory","long_desc":"","tags":[],"see_also":[]},"autotune_memory_target_ratio":{"name":"autotune_memory_target_ratio","type":"float","level":"advanced","flags":0,"default_value":"0.7","min":"","max":"","enum_allowed":[],"desc":"ratio of total system memory to divide amongst autotuned daemons","long_desc":"","tags":[],"see_also":[]},"cephadm_log_destination":{"name":"cephadm_log_destination","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":["file","file,syslog","syslog"],"desc":"Destination for cephadm command's persistent logging","long_desc":"","tags":[],"see_also":[]},"certificate_automated_rotation_enabled":{"name":"certificate_automated_rotation_enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"This flag controls whether cephadm automatically rotates certificates upon expiration.","long_desc":"","tags":[],"see_also":[]},"certificate_check_debug_mode":{"name":"certificate_check_debug_mode","type":"bool","level":"dev","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"FOR TESTING ONLY: This flag forces the certificate check instead of waiting for certificate_check_period.","long_desc":"","tags":[],"see_also":[]},"certificate_check_period":{"name":"certificate_check_period","type":"int","level":"advanced","flags":0,"default_value":"1","min":"0","max":"30","enum_allowed":[],"desc":"Specifies how often (in days) the certificate should be checked for validity.","long_desc":"","tags":[],"see_also":[]},"certificate_duration_days":{"name":"certificate_duration_days","type":"int","level":"advanced","flags":0,"default_value":"1095","min":"90","max":"3650","enum_allowed":[],"desc":"Specifies the duration of self certificates generated and signed by cephadm root CA","long_desc":"","tags":[],"see_also":[]},"certificate_renewal_threshold_days":{"name":"certificate_renewal_threshold_days","type":"int","level":"advanced","flags":0,"default_value":"30","min":"10","max":"90","enum_allowed":[],"desc":"Specifies the lead time in days to initiate certificate renewal before expiration.","long_desc":"","tags":[],"see_also":[]},"cgroups_split":{"name":"cgroups_split","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Pass --cgroups=split when cephadm creates containers (currently podman 
only)","long_desc":"","tags":[],"see_also":[]},"config_checks_enabled":{"name":"config_checks_enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable or disable the cephadm configuration analysis","long_desc":"","tags":[],"see_also":[]},"config_dashboard":{"name":"config_dashboard","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"manage configs like API endpoints in Dashboard.","long_desc":"","tags":[],"see_also":[]},"container_image_alertmanager":{"name":"container_image_alertmanager","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/alertmanager:v0.28.1","min":"","max":"","enum_allowed":[],"desc":"Alertmanager container image","long_desc":"","tags":[],"see_also":[]},"container_image_base":{"name":"container_image_base","type":"str","level":"advanced","flags":1,"default_value":"quay.io/ceph/ceph","min":"","max":"","enum_allowed":[],"desc":"Container image name, without the tag","long_desc":"","tags":[],"see_also":[]},"container_image_elasticsearch":{"name":"container_image_elasticsearch","type":"str","level":"advanced","flags":0,"default_value":"quay.io/omrizeneva/elasticsearch:6.8.23","min":"","max":"","enum_allowed":[],"desc":"Elasticsearch container image","long_desc":"","tags":[],"see_also":[]},"container_image_grafana":{"name":"container_image_grafana","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/grafana:12.2.0","min":"","max":"","enum_allowed":[],"desc":"Grafana container image","long_desc":"","tags":[],"see_also":[]},"container_image_haproxy":{"name":"container_image_haproxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/haproxy:2.3","min":"","max":"","enum_allowed":[],"desc":"Haproxy container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_agent":{"name":"container_image_jaeger_agent","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-agent:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger agent container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_collector":{"name":"container_image_jaeger_collector","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-collector:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger collector container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_query":{"name":"container_image_jaeger_query","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-query:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger query container image","long_desc":"","tags":[],"see_also":[]},"container_image_keepalived":{"name":"container_image_keepalived","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/keepalived:2.2.4","min":"","max":"","enum_allowed":[],"desc":"Keepalived container image","long_desc":"","tags":[],"see_also":[]},"container_image_loki":{"name":"container_image_loki","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/loki:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Loki container image","long_desc":"","tags":[],"see_also":[]},"container_image_nginx":{"name":"container_image_nginx","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nginx:sclorg-nginx-126","min":"","max":"","enum_allowed":[],"desc":"Nginx container 
image","long_desc":"","tags":[],"see_also":[]},"container_image_node_exporter":{"name":"container_image_node_exporter","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/node-exporter:v1.9.1","min":"","max":"","enum_allowed":[],"desc":"Node exporter container image","long_desc":"","tags":[],"see_also":[]},"container_image_nvmeof":{"name":"container_image_nvmeof","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nvmeof:1.5","min":"","max":"","enum_allowed":[],"desc":"Nvmeof container image","long_desc":"","tags":[],"see_also":[]},"container_image_oauth2_proxy":{"name":"container_image_oauth2_proxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/oauth2-proxy/oauth2-proxy:v7.6.0","min":"","max":"","enum_allowed":[],"desc":"Oauth2 proxy container image","long_desc":"","tags":[],"see_also":[]},"container_image_prometheus":{"name":"container_image_prometheus","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/prometheus:v3.6.0","min":"","max":"","enum_allowed":[],"desc":"Prometheus container image","long_desc":"","tags":[],"see_also":[]},"container_image_promtail":{"name":"container_image_promtail","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/promtail:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Promtail container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba":{"name":"container_image_samba","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-server:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba_metrics":{"name":"container_image_samba_metrics","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-metrics:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba metrics container image","long_desc":"","tags":[],"see_also":[]},"container_image_snmp_gateway":{"name":"container_image_snmp_gateway","type":"str","level":"advanced","flags":0,"default_value":"docker.io/maxwo/snmp-notifier:v1.2.1","min":"","max":"","enum_allowed":[],"desc":"Snmp gateway container image","long_desc":"","tags":[],"see_also":[]},"container_init":{"name":"container_init","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Run podman/docker with `--init`","long_desc":"","tags":[],"see_also":[]},"daemon_cache_timeout":{"name":"daemon_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"seconds to cache service (daemon) inventory","long_desc":"","tags":[],"see_also":[]},"default_cephadm_command_timeout":{"name":"default_cephadm_command_timeout","type":"int","level":"advanced","flags":0,"default_value":"900","min":"","max":"","enum_allowed":[],"desc":"Default timeout applied to cephadm commands run directly on the host (in seconds)","long_desc":"","tags":[],"see_also":[]},"default_registry":{"name":"default_registry","type":"str","level":"advanced","flags":0,"default_value":"quay.io","min":"","max":"","enum_allowed":[],"desc":"Search-registry to which we should normalize unqualified image names. 
This is not the default registry","long_desc":"","tags":[],"see_also":[]},"device_cache_timeout":{"name":"device_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"seconds to cache device inventory","long_desc":"","tags":[],"see_also":[]},"device_enhanced_scan":{"name":"device_enhanced_scan","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use libstoragemgmt during device scans","long_desc":"","tags":[],"see_also":[]},"facts_cache_timeout":{"name":"facts_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"seconds to cache host facts data","long_desc":"","tags":[],"see_also":[]},"grafana_dashboards_path":{"name":"grafana_dashboards_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/grafana/dashboards/ceph-dashboard/","min":"","max":"","enum_allowed":[],"desc":"location of dashboards to include in grafana deployments","long_desc":"","tags":[],"see_also":[]},"host_check_interval":{"name":"host_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to perform a host check","long_desc":"","tags":[],"see_also":[]},"hw_monitoring":{"name":"hw_monitoring","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Deploy hw monitoring daemon on every host.","long_desc":"","tags":[],"see_also":[]},"inventory_list_all":{"name":"inventory_list_all","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Whether ceph-volume inventory should report more devices (mostly mappers (LVs / mpaths), partitions...)","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_refresh_metadata":{"name":"log_refresh_metadata","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Log all refresh metadata. Includes daemon, device, and host info collected regularly. 
Only has effect if logging at debug level","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"log to the \"cephadm\" cluster log channel\"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf":{"name":"manage_etc_ceph_ceph_conf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Manage and own /etc/ceph/ceph.conf on the hosts.","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf_hosts":{"name":"manage_etc_ceph_ceph_conf_hosts","type":"str","level":"advanced","flags":0,"default_value":"*","min":"","max":"","enum_allowed":[],"desc":"PlacementSpec describing on which hosts to manage /etc/ceph/ceph.conf","long_desc":"","tags":[],"see_also":[]},"max_count_per_host":{"name":"max_count_per_host","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of daemons per service per host","long_desc":"","tags":[],"see_also":[]},"max_osd_draining_count":{"name":"max_osd_draining_count","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of osds that will be drained simultaneously when osds are removed","long_desc":"","tags":[],"see_also":[]},"migration_current":{"name":"migration_current","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"internal - do not modify","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":0,"default_value":"root","min":"","max":"","enum_allowed":["cephadm-package","root"],"desc":"mode for remote execution of cephadm","long_desc":"","tags":[],"see_also":[]},"oob_default_addr":{"name":"oob_default_addr","type":"str","level":"advanced","flags":0,"default_value":"169.254.1.1","min":"","max":"","enum_allowed":[],"desc":"Default address for RedFish API (oob management).","long_desc":"","tags":[],"see_also":[]},"prometheus_alerts_path":{"name":"prometheus_alerts_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/prometheus/ceph/ceph_default_alerts.yml","min":"","max":"","enum_allowed":[],"desc":"location of alerts to include in prometheus deployments","long_desc":"","tags":[],"see_also":[]},"registry_insecure":{"name":"registry_insecure","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Registry is to be considered insecure (no TLS available). Only for development purposes.","long_desc":"","tags":[],"see_also":[]},"registry_password":{"name":"registry_password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository password. 
Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"registry_url":{"name":"registry_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Registry url for login purposes. This is not the default registry","long_desc":"","tags":[],"see_also":[]},"registry_username":{"name":"registry_username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository username. Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"service_discovery_port":{"name":"service_discovery_port","type":"int","level":"advanced","flags":0,"default_value":"8765","min":"","max":"","enum_allowed":[],"desc":"cephadm service discovery port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssh_config_file":{"name":"ssh_config_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"customized SSH config file to connect to managed hosts","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_count_max":{"name":"ssh_keepalive_count_max","type":"int","level":"advanced","flags":0,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"How many times ssh connections can fail liveness checks before the host is marked offline","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_interval":{"name":"ssh_keepalive_interval","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"How often ssh connections are checked for liveness","long_desc":"","tags":[],"see_also":[]},"stray_daemon_check_interval":{"name":"stray_daemon_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"how frequently cephadm should check for the presence of stray daemons","long_desc":"","tags":[],"see_also":[]},"use_agent":{"name":"use_agent","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use cephadm agent on each host to gather and send metadata","long_desc":"","tags":[],"see_also":[]},"use_repo_digest":{"name":"use_repo_digest","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Automatically convert image tags to image digest. 
Make sure all daemons use the same image","long_desc":"","tags":[],"see_also":[]},"warn_on_failed_host_check":{"name":"warn_on_failed_host_check","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if the host check fails","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_daemons":{"name":"warn_on_stray_daemons","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected that are not managed by cephadm","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_hosts":{"name":"warn_on_stray_hosts","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected on a host that is not managed by cephadm","long_desc":"","tags":[],"see_also":[]}}},{"name":"crash","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"retain_interval":{"name":"retain_interval","type":"secs","level":"advanced","flags":1,"default_value":"31536000","min":"","max":"","enum_allowed":[],"desc":"how long to retain crashes before pruning them","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_recent_interval":{"name":"warn_recent_interval","type":"secs","level":"advanced","flags":1,"default_value":"1209600","min":"","max":"","enum_allowed":[],"desc":"time interval in which to warn about recent 
crashes","long_desc":"","tags":[],"see_also":[]}}},{"name":"dashboard","can_run":true,"error_string":"","module_options":{"ACCOUNT_LOCKOUT_ATTEMPTS":{"name":"ACCOUNT_LOCKOUT_ATTEMPTS","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_HOST":{"name":"ALERTMANAGER_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_SSL_VERIFY":{"name":"ALERTMANAGER_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_ENABLED":{"name":"AUDIT_API_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_LOG_PAYLOAD":{"name":"AUDIT_API_LOG_PAYLOAD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ENABLE_BROWSABLE_API":{"name":"ENABLE_BROWSABLE_API","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_CEPHFS":{"name":"FEATURE_TOGGLE_CEPHFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_DASHBOARD":{"name":"FEATURE_TOGGLE_DASHBOARD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_ISCSI":{"name":"FEATURE_TOGGLE_ISCSI","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_MIRRORING":{"name":"FEATURE_TOGGLE_MIRRORING","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_NFS":{"name":"FEATURE_TOGGLE_NFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RBD":{"name":"FEATURE_TOGGLE_RBD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RGW":{"name":"FEATURE_TOGGLE_RGW","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE":{"name":"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_PASSWORD":{"name":"GRAFANA_API_PASSWORD","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_SSL_VERIFY":{"name":"GRAFANA_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_URL":{"name":"GRAFANA_API_URL","type":"str","level":"advanced","flags":0
,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_USERNAME":{"name":"GRAFANA_API_USERNAME","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_FRONTEND_API_URL":{"name":"GRAFANA_FRONTEND_API_URL","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_UPDATE_DASHBOARDS":{"name":"GRAFANA_UPDATE_DASHBOARDS","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISCSI_API_SSL_VERIFICATION":{"name":"ISCSI_API_SSL_VERIFICATION","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISSUE_TRACKER_API_KEY":{"name":"ISSUE_TRACKER_API_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MANAGED_BY_CLUSTERS":{"name":"MANAGED_BY_CLUSTERS","type":"str","level":"advanced","flags":0,"default_value":"[]","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MULTICLUSTER_CONFIG":{"name":"MULTICLUSTER_CONFIG","type":"str","level":"advanced","flags":0,"default_value":"{}","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_HOST":{"name":"PROMETHEUS_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_SSL_VERIFY":{"name":"PROMETHEUS_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_COMPLEXITY_ENABLED":{"name":"PWD_POLICY_CHECK_COMPLEXITY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED":{"name":"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_LENGTH_ENABLED":{"name":"PWD_POLICY_CHECK_LENGTH_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_OLDPWD_ENABLED":{"name":"PWD_POLICY_CHECK_OLDPWD_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_USERNAME_ENABLED":{"name":"PWD_POLICY_CHECK_USERNAME_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"Fal
se","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_ENABLED":{"name":"PWD_POLICY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_EXCLUSION_LIST":{"name":"PWD_POLICY_EXCLUSION_LIST","type":"str","level":"advanced","flags":0,"default_value":"osd,host,dashboard,pool,block,nfs,ceph,monitors,gateway,logs,crush,maps","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_COMPLEXITY":{"name":"PWD_POLICY_MIN_COMPLEXITY","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_LENGTH":{"name":"PWD_POLICY_MIN_LENGTH","type":"int","level":"advanced","flags":0,"default_value":"8","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"REST_REQUESTS_TIMEOUT":{"name":"REST_REQUESTS_TIMEOUT","type":"int","level":"advanced","flags":0,"default_value":"45","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ACCESS_KEY":{"name":"RGW_API_ACCESS_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ADMIN_RESOURCE":{"name":"RGW_API_ADMIN_RESOURCE","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SECRET_KEY":{"name":"RGW_API_SECRET_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SSL_VERIFY":{"name":"RGW_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_HOSTNAME_PER_DAEMON":{"name":"RGW_HOSTNAME_PER_DAEMON","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"UNSAFE_TLS_v1_2":{"name":"UNSAFE_TLS_v1_2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_SPAN":{"name":"USER_PWD_EXPIRATION_SPAN","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_1":{"name":"USER_PWD_EXPIRATION_WARNING_1","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_2":{"name":"USER_PWD_EXPIRATION_WARNING_2","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"cross_origin_url":{"name":"cross_origin_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"crt_file":{"name":"crt_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"debug":{"name":"debug","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"
desc":"Enable/disable debug options","long_desc":"","tags":[],"see_also":[]},"jwt_token_ttl":{"name":"jwt_token_ttl","type":"int","level":"advanced","flags":0,"default_value":"28800","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"key_file":{"name":"key_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"motd":{"name":"motd","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"The message of the day","long_desc":"","tags":[],"see_also":[]},"redirect_resolve_ip_addr":{"name":"redirect_resolve_ip_addr","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":0,"default_value":"8080","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl_server_port":{"name":"ssl_server_port","type":"int","level":"advanced","flags":0,"default_value":"8443","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sso_oauth2":{"name":"sso_oauth2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":0,"default_value":"redirect","min":"","max":"","enum_allowed":["error","redirect"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":0,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url_prefix":{"name":"url_prefix","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"devicehealth","can_run":true,"error_string":"","module_options":{"enable_monitoring":{"name":
"enable_monitoring","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"monitor device health metrics","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mark_out_threshold":{"name":"mark_out_threshold","type":"secs","level":"advanced","flags":1,"default_value":"2419200","min":"","max":"","enum_allowed":[],"desc":"automatically mark OSD if it may fail before this long","long_desc":"","tags":[],"see_also":[]},"pool_name":{"name":"pool_name","type":"str","level":"advanced","flags":1,"default_value":"device_health_metrics","min":"","max":"","enum_allowed":[],"desc":"name of pool in which to store device health metrics","long_desc":"","tags":[],"see_also":[]},"retention_period":{"name":"retention_period","type":"secs","level":"advanced","flags":1,"default_value":"15552000","min":"","max":"","enum_allowed":[],"desc":"how long to retain device health metrics","long_desc":"","tags":[],"see_also":[]},"scrape_frequency":{"name":"scrape_frequency","type":"secs","level":"advanced","flags":1,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"how frequently to scrape device health metrics","long_desc":"","tags":[],"see_also":[]},"self_heal":{"name":"self_heal","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"preemptively heal cluster around devices that may fail","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and check device health","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_threshold":{"name":"warn_threshold","type":"secs","level":"advanced","flags":1,"default_value":"7257600","min":"","max":"","enum_allowed":[],"desc":"raise health warning if OSD may fail before this 
long","long_desc":"","tags":[],"see_also":[]}}},{"name":"diskprediction_local","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predict_interval":{"name":"predict_interval","type":"str","level":"advanced","flags":0,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predictor_model":{"name":"predictor_model","type":"str","level":"advanced","flags":0,"default_value":"prophetstor","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"str","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"influx","can_run":true,"error_string":"","module_options":{"batch_size":{"name":"batch_size","type":"int","level":"advanced","flags":0,"default_value":"5000","min":"","max":"","enum_allowed":[],"desc":"How big batches of data points should be when sending to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"database":{"name":"database","type":"str","level":"advanced","flags":0,"default_value":"ceph","min":"","max":"","enum_allowed":[],"desc":"InfluxDB database name. You will need to create this database and grant write privileges to the configured username or the username must have admin privileges to create it.","long_desc":"","tags":[],"see_also":[]},"hostname":{"name":"hostname","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server hostname","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"30","min":"5","max":"","enum_allowed":[],"desc":"Time between reports to InfluxDB. 
Default 30 seconds.","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"password":{"name":"password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"password of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"port":{"name":"port","type":"int","level":"advanced","flags":0,"default_value":"8086","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"str","level":"advanced","flags":0,"default_value":"false","min":"","max":"","enum_allowed":[],"desc":"Use https connection for InfluxDB server. Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]},"threads":{"name":"threads","type":"int","level":"advanced","flags":0,"default_value":"5","min":"1","max":"32","enum_allowed":[],"desc":"How many worker threads should be spawned for sending data to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"username":{"name":"username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"username of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"verify_ssl":{"name":"verify_ssl","type":"str","level":"advanced","flags":0,"default_value":"true","min":"","max":"","enum_allowed":[],"desc":"Verify https cert for InfluxDB server. 
Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]}}},{"name":"insights","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"iostat","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"k8sevents","can_run":true,"error_string":"","module_options":{"ceph_event_retention_days":{"name":"ceph_event_retention_days","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"Days to hold ceph event information within local cache","long_desc":"","tags":[],"see_also":[]},"config_check_secs":{"name":"config_check_secs","type":"int","level":"advanced","flags":0,"default_value":"10","min":"10","max":"","enum_allowed":[],"desc":"interval (secs) to check for cluster configuration 
changes","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"localpool","can_run":true,"error_string":"","module_options":{"failure_domain":{"name":"failure_domain","type":"str","level":"advanced","flags":1,"default_value":"host","min":"","max":"","enum_allowed":[],"desc":"failure domain for any created local pool","long_desc":"what failure domain we should separate data replicas across.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_size":{"name":"min_size","type":"int","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"default min_size for any created local pool","long_desc":"value to set min_size to (unchanged from Ceph's default if this option is not set)","tags":[],"see_also":[]},"num_rep":{"name":"num_rep","type":"int","level":"advanced","flags":1,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"default replica count for any created local pool","long_desc":"","tags":[],"see_also":[]},"pg_num":{"name":"pg_num","type":"int","level":"advanced","flags":1,"default_value":"128","min":"","max":"","enum_allowed":[],"desc":"default pg_num for any created local pool","long_desc":"","tags":[],"see_also":[]},"prefix":{"name":"prefix","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"name prefix for any created local 
pool","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"subtree":{"name":"subtree","type":"str","level":"advanced","flags":1,"default_value":"rack","min":"","max":"","enum_allowed":[],"desc":"CRUSH level for which to create a local pool","long_desc":"which CRUSH subtree type the module should create a pool for.","tags":[],"see_also":[]}}},{"name":"mds_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"mirroring","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"nfs","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":
"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"orchestrator","can_run":true,"error_string":"","module_options":{"fail_fs":{"name":"fail_fs","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Fail filesystem for rapid multi-rank mds upgrade","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"orchestrator":{"name":"orchestrator","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["cephadm","rook","test_orchestrator"],"desc":"Orchestrator backend","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_perf_query","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[
]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"pg_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"threshold":{"name":"threshold","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"1.0","max":"","enum_allowed":[],"desc":"scaling threshold","long_desc":"The factor by which the `NEW PG_NUM` must vary from the current`PG_NUM` before being accepted. 
Cannot be less than 1.0","tags":[],"see_also":[]}}},{"name":"progress","can_run":true,"error_string":"","module_options":{"allow_pg_recovery_event":{"name":"allow_pg_recovery_event","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow the module to show pg recovery progress","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_completed_events":{"name":"max_completed_events","type":"int","level":"advanced","flags":1,"default_value":"50","min":"","max":"","enum_allowed":[],"desc":"number of past completed events to remember","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"how long the module is going to sleep","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"prometheus","can_run":true,"error_string":"","module_options":{"cache":{"name":"cache","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"exclude_perf_counters":{"name":"exclude_perf_counters","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Do not include perf-counters in the metrics output","long_desc":"Gathering perf-counters from a single Prometheus exporter can degrade ceph-mgr performance, especially in large clusters. Instead, Ceph-exporter daemons are now used by default for perf-counter gathering. 
This should only be disabled when no ceph-exporters are deployed.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools":{"name":"rbd_stats_pools","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools_refresh_interval":{"name":"rbd_stats_pools_refresh_interval","type":"int","level":"advanced","flags":0,"default_value":"300","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"scrape_interval":{"name":"scrape_interval","type":"float","level":"advanced","flags":0,"default_value":"15.0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"the IPv4 or IPv6 address on which the module listens for HTTP requests","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":1,"default_value":"9283","min":"","max":"","enum_allowed":[],"desc":"the port on which the module listens for HTTP 
requests","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"stale_cache_strategy":{"name":"stale_cache_strategy","type":"str","level":"advanced","flags":0,"default_value":"log","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":1,"default_value":"default","min":"","max":"","enum_allowed":["default","error"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":1,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rbd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_snap_create":{"name":"max_concurrent_snap_create","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mirror_snapshot_schedule":{"name":"mirror_snapshot_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"trash_purge_schedule":{"name":"trash_purge_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rgw","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_als
o":[]},"secondary_zone_period_retry_limit":{"name":"secondary_zone_period_retry_limit","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"RGW module period update retry limit for secondary site","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rook","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"prometheus_tls_secret_name":{"name":"prometheus_tls_secret_name","type":"str","level":"advanced","flags":0,"default_value":"rook-ceph-prometheus-server-tls","min":"","max":"","enum_allowed":[],"desc":"name of tls secret in k8s for prometheus","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"storage_class":{"name":"storage_class","type":"str","level":"advanced","flags":0,"default_value":"local","min":"","max":"","enum_allowed":[],"desc":"storage class name for LSO-discovered 
PVs","long_desc":"","tags":[],"see_also":[]}}},{"name":"selftest","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption1":{"name":"roption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption2":{"name":"roption2","type":"str","level":"advanced","flags":0,"default_value":"xyz","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption1":{"name":"rwoption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption2":{"name":"rwoption2","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption3":{"name":"rwoption3","type":"float","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption4":{"name":"rwoption4","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption5":{"name":"rwoption5","type":"bool","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption6":{"name":"rwoption6","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption7":{"name":"rwoption7","type":"int","level":"advanced","flags":0,"default_value":"","min":"1","max":"42","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testkey":{"name":"testkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testlkey":{"name":"testlkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testnewline":{"name":"testnewline","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"smb","can_run":true,"error_string":"","module_options":{"internal_store_backend":{"name":"internal_store_backend","type":"str","level":"dev","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"set 
internal store backend. for develoment and testing only","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_orchestration":{"name":"update_orchestration","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically update orchestration when smb resources are changed","long_desc":"","tags":[],"see_also":[]}}},{"name":"snap_schedule","can_run":true,"error_string":"","module_options":{"allow_m_granularity":{"name":"allow_m_granularity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow minute scheduled snapshots","long_desc":"","tags":[],"see_also":[]},"dump_on_update":{"name":"dump_on_update","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"dump database to debug log on 
update","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"stats","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"status","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telegraf","can_run":true,"error_string":"","module_options":{"address":{"name":"address","type":"str","level":"advanced","flags":0,"default_value":"unixgram:///tmp/telegraf.sock","min":"","max":"","enum_a
llowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"15","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telemetry","can_run":true,"error_string":"","module_options":{"channel_basic":{"name":"channel_basic","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share basic cluster information (size, version)","long_desc":"","tags":[],"see_also":[]},"channel_crash":{"name":"channel_crash","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share metadata about Ceph daemon crashes (version, stack straces, etc)","long_desc":"","tags":[],"see_also":[]},"channel_device":{"name":"channel_device","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share device health metrics (e.g., SMART data, minus potentially identifying info like serial numbers)","long_desc":"","tags":[],"see_also":[]},"channel_ident":{"name":"channel_ident","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share a user-provided description and/or contact email for the cluster","long_desc":"","tags":[],"see_also":[]},"channel_perf":{"name":"channel_perf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share various performance metrics of a 
cluster","long_desc":"","tags":[],"see_also":[]},"contact":{"name":"contact","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"description":{"name":"description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"device_url":{"name":"device_url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/device","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"int","level":"advanced","flags":0,"default_value":"24","min":"8","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"last_opt_revision":{"name":"last_opt_revision","type":"int","level":"advanced","flags":0,"default_value":"1","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard":{"name":"leaderboard","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard_description":{"name":"leaderboard_description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"organization":{"name":"organization","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"proxy":{"name":"proxy","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url":{"name":"url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/report","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"test_orchestrator","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"adv
anced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"volumes","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_clones":{"name":"max_concurrent_clones","type":"int","level":"advanced","flags":0,"default_value":"4","min":"","max":"","enum_allowed":[],"desc":"Number of asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_cloning":{"name":"pause_cloning","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_purging":{"name":"pause_purging","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous subvolume purge threads","long_desc":"","tags":[],"see_also":[]},"periodic_async_work":{"name":"periodic_async_work","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Periodically check for async work","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_delay":{"name":"snapshot_clone_delay","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"Delay clone begin operation by snapshot_clone_delay seconds","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_no_wait":{"name":"snapshot_clone_no_wait","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Reject subvolume clone request when cloner threads are 
busy","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}}]}],"modules":["iostat","nfs"],"available_modules":[{"name":"alerts","can_run":true,"error_string":"","module_options":{"interval":{"name":"interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"How frequently to reexamine health status","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"smtp_destination":{"name":"smtp_destination","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Email address to send alerts to, use commas to separate multiple","long_desc":"","tags":[],"see_also":[]},"smtp_from_name":{"name":"smtp_from_name","type":"str","level":"advanced","flags":1,"default_value":"Ceph","min":"","max":"","enum_allowed":[],"desc":"Email From: name","long_desc":"","tags":[],"see_also":[]},"smtp_host":{"name":"smtp_host","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP server","long_desc":"","tags":[],"see_also":[]},"smtp_password":{"name":"smtp_password","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Password to authenticate with","long_desc":"","tags":[],"see_also":[]},"smtp_port":{"name":"smtp_port","type":"int","level":"advanced","flags":1,"default_value":"465","min":"","max":"","enum_allowed":[],"desc":"SMTP port","long_desc":"","tags":[],"see_also":[]},"smtp_sender":{"name":"smtp_sender","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP envelope sender","long_desc":"","tags":[],"see_also":[]},"smtp_ssl":{"name":"smtp_ssl","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Use SSL to connect to SMTP server","long_desc":"","tags":[],"see_also":[]},"smtp_user":{"name":"smtp_user","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"User to authenticate as","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"balancer","can_run":true,"error_string":"","module_options":{"active":{"name":"active","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically balance PGs across 
cluster","long_desc":"","tags":[],"see_also":[]},"begin_time":{"name":"begin_time","type":"str","level":"advanced","flags":1,"default_value":"0000","min":"","max":"","enum_allowed":[],"desc":"beginning time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"begin_weekday":{"name":"begin_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to this day of the week or later","long_desc":"0 = Sunday, 1 = Monday, etc.","tags":[],"see_also":[]},"crush_compat_max_iterations":{"name":"crush_compat_max_iterations","type":"uint","level":"advanced","flags":1,"default_value":"25","min":"1","max":"250","enum_allowed":[],"desc":"maximum number of iterations to attempt optimization","long_desc":"","tags":[],"see_also":[]},"crush_compat_metrics":{"name":"crush_compat_metrics","type":"str","level":"advanced","flags":1,"default_value":"pgs,objects,bytes","min":"","max":"","enum_allowed":[],"desc":"metrics with which to calculate OSD utilization","long_desc":"Value is a list of one or more of \"pgs\", \"objects\", or \"bytes\", and indicates which metrics to use to balance utilization.","tags":[],"see_also":[]},"crush_compat_step":{"name":"crush_compat_step","type":"float","level":"advanced","flags":1,"default_value":"0.5","min":"0.001","max":"0.999","enum_allowed":[],"desc":"aggressiveness of optimization","long_desc":".99 is very aggressive, .01 is less aggressive","tags":[],"see_also":[]},"end_time":{"name":"end_time","type":"str","level":"advanced","flags":1,"default_value":"2359","min":"","max":"","enum_allowed":[],"desc":"ending time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"end_weekday":{"name":"end_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to days of the week earlier than this","long_desc":"0 = Sunday, 1 = Monday, etc.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_score":{"name":"min_score","type":"float","level":"advanced","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"minimum score, below which no optimization is attempted","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":1,"default_value":"upmap","min":"","max":"","enum_allowed":["crush-compat","none","read","upmap","upmap-read"],"desc":"Balancer 
mode","long_desc":"","tags":[],"see_also":[]},"pool_ids":{"name":"pool_ids","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"pools which the automatic balancing will be limited to","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and attempt optimization","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_pg_upmap_activity":{"name":"update_pg_upmap_activity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Updates pg_upmap activity stats to be used in `balancer status detail`","long_desc":"","tags":[],"see_also":[]},"upmap_max_deviation":{"name":"upmap_max_deviation","type":"int","level":"advanced","flags":1,"default_value":"5","min":"1","max":"","enum_allowed":[],"desc":"deviation below which no optimization is attempted","long_desc":"If the number of PGs are within this count then no optimization is attempted","tags":[],"see_also":[]},"upmap_max_optimizations":{"name":"upmap_max_optimizations","type":"uint","level":"advanced","flags":1,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"maximum upmap optimizations to make per attempt","long_desc":"","tags":[],"see_also":[]}}},{"name":"cephadm","can_run":true,"error_string":"","module_options":{"agent_down_multiplier":{"name":"agent_down_multiplier","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"","max":"","enum_allowed":[],"desc":"Multiplied by agent refresh rate to calculate how long agent must not report before being marked down","long_desc":"","tags":[],"see_also":[]},"agent_refresh_rate":{"name":"agent_refresh_rate","type":"secs","level":"advanced","flags":0,"default_value":"20","min":"","max":"","enum_allowed":[],"desc":"How often agent on each host will try to gather and send metadata","long_desc":"","tags":[],"see_also":[]},"agent_starting_port":{"name":"agent_starting_port","type":"int","level":"advanced","flags":0,"default_value":"4721","min":"","max":"","enum_allowed":[],"desc":"First port agent will try to bind to (will also try up to next 1000 subsequent ports if blocked)","long_desc":"","tags":[],"see_also":[]},"allow_ptrace":{"name":"allow_ptrace","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow SYS_PTRACE capability on ceph containers","long_desc":"The SYS_PTRACE capability is needed to attach to a process with gdb or strace. 
Enabling this options can allow debugging daemons that encounter problems at runtime.","tags":[],"see_also":[]},"autotune_interval":{"name":"autotune_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to autotune daemon memory","long_desc":"","tags":[],"see_also":[]},"autotune_memory_target_ratio":{"name":"autotune_memory_target_ratio","type":"float","level":"advanced","flags":0,"default_value":"0.7","min":"","max":"","enum_allowed":[],"desc":"ratio of total system memory to divide amongst autotuned daemons","long_desc":"","tags":[],"see_also":[]},"cephadm_log_destination":{"name":"cephadm_log_destination","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":["file","file,syslog","syslog"],"desc":"Destination for cephadm command's persistent logging","long_desc":"","tags":[],"see_also":[]},"certificate_automated_rotation_enabled":{"name":"certificate_automated_rotation_enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"This flag controls whether cephadm automatically rotates certificates upon expiration.","long_desc":"","tags":[],"see_also":[]},"certificate_check_debug_mode":{"name":"certificate_check_debug_mode","type":"bool","level":"dev","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"FOR TESTING ONLY: This flag forces the certificate check instead of waiting for certificate_check_period.","long_desc":"","tags":[],"see_also":[]},"certificate_check_period":{"name":"certificate_check_period","type":"int","level":"advanced","flags":0,"default_value":"1","min":"0","max":"30","enum_allowed":[],"desc":"Specifies how often (in days) the certificate should be checked for validity.","long_desc":"","tags":[],"see_also":[]},"certificate_duration_days":{"name":"certificate_duration_days","type":"int","level":"advanced","flags":0,"default_value":"1095","min":"90","max":"3650","enum_allowed":[],"desc":"Specifies the duration of self certificates generated and signed by cephadm root CA","long_desc":"","tags":[],"see_also":[]},"certificate_renewal_threshold_days":{"name":"certificate_renewal_threshold_days","type":"int","level":"advanced","flags":0,"default_value":"30","min":"10","max":"90","enum_allowed":[],"desc":"Specifies the lead time in days to initiate certificate renewal before expiration.","long_desc":"","tags":[],"see_also":[]},"cgroups_split":{"name":"cgroups_split","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Pass --cgroups=split when cephadm creates containers (currently podman only)","long_desc":"","tags":[],"see_also":[]},"config_checks_enabled":{"name":"config_checks_enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable or disable the cephadm configuration analysis","long_desc":"","tags":[],"see_also":[]},"config_dashboard":{"name":"config_dashboard","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"manage configs like API endpoints in Dashboard.","long_desc":"","tags":[],"see_also":[]},"container_image_alertmanager":{"name":"container_image_alertmanager","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/alertmanager:v0.28.1","min":"","max":"","enum_allowed":[],"desc":"Alertmanager container 
image","long_desc":"","tags":[],"see_also":[]},"container_image_base":{"name":"container_image_base","type":"str","level":"advanced","flags":1,"default_value":"quay.io/ceph/ceph","min":"","max":"","enum_allowed":[],"desc":"Container image name, without the tag","long_desc":"","tags":[],"see_also":[]},"container_image_elasticsearch":{"name":"container_image_elasticsearch","type":"str","level":"advanced","flags":0,"default_value":"quay.io/omrizeneva/elasticsearch:6.8.23","min":"","max":"","enum_allowed":[],"desc":"Elasticsearch container image","long_desc":"","tags":[],"see_also":[]},"container_image_grafana":{"name":"container_image_grafana","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/grafana:12.2.0","min":"","max":"","enum_allowed":[],"desc":"Grafana container image","long_desc":"","tags":[],"see_also":[]},"container_image_haproxy":{"name":"container_image_haproxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/haproxy:2.3","min":"","max":"","enum_allowed":[],"desc":"Haproxy container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_agent":{"name":"container_image_jaeger_agent","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-agent:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger agent container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_collector":{"name":"container_image_jaeger_collector","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-collector:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger collector container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_query":{"name":"container_image_jaeger_query","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-query:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger query container image","long_desc":"","tags":[],"see_also":[]},"container_image_keepalived":{"name":"container_image_keepalived","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/keepalived:2.2.4","min":"","max":"","enum_allowed":[],"desc":"Keepalived container image","long_desc":"","tags":[],"see_also":[]},"container_image_loki":{"name":"container_image_loki","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/loki:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Loki container image","long_desc":"","tags":[],"see_also":[]},"container_image_nginx":{"name":"container_image_nginx","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nginx:sclorg-nginx-126","min":"","max":"","enum_allowed":[],"desc":"Nginx container image","long_desc":"","tags":[],"see_also":[]},"container_image_node_exporter":{"name":"container_image_node_exporter","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/node-exporter:v1.9.1","min":"","max":"","enum_allowed":[],"desc":"Node exporter container image","long_desc":"","tags":[],"see_also":[]},"container_image_nvmeof":{"name":"container_image_nvmeof","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nvmeof:1.5","min":"","max":"","enum_allowed":[],"desc":"Nvmeof container image","long_desc":"","tags":[],"see_also":[]},"container_image_oauth2_proxy":{"name":"container_image_oauth2_proxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/oauth2-proxy/oauth2-proxy:v7.6.0","min":"","max":"","enum_allowed":[],"desc":"Oauth2 proxy container 
image","long_desc":"","tags":[],"see_also":[]},"container_image_prometheus":{"name":"container_image_prometheus","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/prometheus:v3.6.0","min":"","max":"","enum_allowed":[],"desc":"Prometheus container image","long_desc":"","tags":[],"see_also":[]},"container_image_promtail":{"name":"container_image_promtail","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/promtail:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Promtail container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba":{"name":"container_image_samba","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-server:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba_metrics":{"name":"container_image_samba_metrics","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-metrics:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba metrics container image","long_desc":"","tags":[],"see_also":[]},"container_image_snmp_gateway":{"name":"container_image_snmp_gateway","type":"str","level":"advanced","flags":0,"default_value":"docker.io/maxwo/snmp-notifier:v1.2.1","min":"","max":"","enum_allowed":[],"desc":"Snmp gateway container image","long_desc":"","tags":[],"see_also":[]},"container_init":{"name":"container_init","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Run podman/docker with `--init`","long_desc":"","tags":[],"see_also":[]},"daemon_cache_timeout":{"name":"daemon_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"seconds to cache service (daemon) inventory","long_desc":"","tags":[],"see_also":[]},"default_cephadm_command_timeout":{"name":"default_cephadm_command_timeout","type":"int","level":"advanced","flags":0,"default_value":"900","min":"","max":"","enum_allowed":[],"desc":"Default timeout applied to cephadm commands run directly on the host (in seconds)","long_desc":"","tags":[],"see_also":[]},"default_registry":{"name":"default_registry","type":"str","level":"advanced","flags":0,"default_value":"quay.io","min":"","max":"","enum_allowed":[],"desc":"Search-registry to which we should normalize unqualified image names. 
This is not the default registry","long_desc":"","tags":[],"see_also":[]},"device_cache_timeout":{"name":"device_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"seconds to cache device inventory","long_desc":"","tags":[],"see_also":[]},"device_enhanced_scan":{"name":"device_enhanced_scan","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use libstoragemgmt during device scans","long_desc":"","tags":[],"see_also":[]},"facts_cache_timeout":{"name":"facts_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"seconds to cache host facts data","long_desc":"","tags":[],"see_also":[]},"grafana_dashboards_path":{"name":"grafana_dashboards_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/grafana/dashboards/ceph-dashboard/","min":"","max":"","enum_allowed":[],"desc":"location of dashboards to include in grafana deployments","long_desc":"","tags":[],"see_also":[]},"host_check_interval":{"name":"host_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to perform a host check","long_desc":"","tags":[],"see_also":[]},"hw_monitoring":{"name":"hw_monitoring","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Deploy hw monitoring daemon on every host.","long_desc":"","tags":[],"see_also":[]},"inventory_list_all":{"name":"inventory_list_all","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Whether ceph-volume inventory should report more devices (mostly mappers (LVs / mpaths), partitions...)","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_refresh_metadata":{"name":"log_refresh_metadata","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Log all refresh metadata. Includes daemon, device, and host info collected regularly. 
Only has effect if logging at debug level","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"log to the \"cephadm\" cluster log channel\"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf":{"name":"manage_etc_ceph_ceph_conf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Manage and own /etc/ceph/ceph.conf on the hosts.","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf_hosts":{"name":"manage_etc_ceph_ceph_conf_hosts","type":"str","level":"advanced","flags":0,"default_value":"*","min":"","max":"","enum_allowed":[],"desc":"PlacementSpec describing on which hosts to manage /etc/ceph/ceph.conf","long_desc":"","tags":[],"see_also":[]},"max_count_per_host":{"name":"max_count_per_host","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of daemons per service per host","long_desc":"","tags":[],"see_also":[]},"max_osd_draining_count":{"name":"max_osd_draining_count","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of osds that will be drained simultaneously when osds are removed","long_desc":"","tags":[],"see_also":[]},"migration_current":{"name":"migration_current","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"internal - do not modify","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":0,"default_value":"root","min":"","max":"","enum_allowed":["cephadm-package","root"],"desc":"mode for remote execution of cephadm","long_desc":"","tags":[],"see_also":[]},"oob_default_addr":{"name":"oob_default_addr","type":"str","level":"advanced","flags":0,"default_value":"169.254.1.1","min":"","max":"","enum_allowed":[],"desc":"Default address for RedFish API (oob management).","long_desc":"","tags":[],"see_also":[]},"prometheus_alerts_path":{"name":"prometheus_alerts_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/prometheus/ceph/ceph_default_alerts.yml","min":"","max":"","enum_allowed":[],"desc":"location of alerts to include in prometheus deployments","long_desc":"","tags":[],"see_also":[]},"registry_insecure":{"name":"registry_insecure","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Registry is to be considered insecure (no TLS available). Only for development purposes.","long_desc":"","tags":[],"see_also":[]},"registry_password":{"name":"registry_password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository password. 
Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"registry_url":{"name":"registry_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Registry url for login purposes. This is not the default registry","long_desc":"","tags":[],"see_also":[]},"registry_username":{"name":"registry_username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository username. Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"service_discovery_port":{"name":"service_discovery_port","type":"int","level":"advanced","flags":0,"default_value":"8765","min":"","max":"","enum_allowed":[],"desc":"cephadm service discovery port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssh_config_file":{"name":"ssh_config_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"customized SSH config file to connect to managed hosts","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_count_max":{"name":"ssh_keepalive_count_max","type":"int","level":"advanced","flags":0,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"How many times ssh connections can fail liveness checks before the host is marked offline","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_interval":{"name":"ssh_keepalive_interval","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"How often ssh connections are checked for liveness","long_desc":"","tags":[],"see_also":[]},"stray_daemon_check_interval":{"name":"stray_daemon_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"how frequently cephadm should check for the presence of stray daemons","long_desc":"","tags":[],"see_also":[]},"use_agent":{"name":"use_agent","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use cephadm agent on each host to gather and send metadata","long_desc":"","tags":[],"see_also":[]},"use_repo_digest":{"name":"use_repo_digest","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Automatically convert image tags to image digest. 
Make sure all daemons use the same image","long_desc":"","tags":[],"see_also":[]},"warn_on_failed_host_check":{"name":"warn_on_failed_host_check","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if the host check fails","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_daemons":{"name":"warn_on_stray_daemons","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected that are not managed by cephadm","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_hosts":{"name":"warn_on_stray_hosts","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected on a host that is not managed by cephadm","long_desc":"","tags":[],"see_also":[]}}},{"name":"crash","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"retain_interval":{"name":"retain_interval","type":"secs","level":"advanced","flags":1,"default_value":"31536000","min":"","max":"","enum_allowed":[],"desc":"how long to retain crashes before pruning them","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_recent_interval":{"name":"warn_recent_interval","type":"secs","level":"advanced","flags":1,"default_value":"1209600","min":"","max":"","enum_allowed":[],"desc":"time interval in which to warn about recent 
crashes","long_desc":"","tags":[],"see_also":[]}}},{"name":"dashboard","can_run":true,"error_string":"","module_options":{"ACCOUNT_LOCKOUT_ATTEMPTS":{"name":"ACCOUNT_LOCKOUT_ATTEMPTS","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_HOST":{"name":"ALERTMANAGER_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_SSL_VERIFY":{"name":"ALERTMANAGER_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_ENABLED":{"name":"AUDIT_API_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_LOG_PAYLOAD":{"name":"AUDIT_API_LOG_PAYLOAD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ENABLE_BROWSABLE_API":{"name":"ENABLE_BROWSABLE_API","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_CEPHFS":{"name":"FEATURE_TOGGLE_CEPHFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_DASHBOARD":{"name":"FEATURE_TOGGLE_DASHBOARD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_ISCSI":{"name":"FEATURE_TOGGLE_ISCSI","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_MIRRORING":{"name":"FEATURE_TOGGLE_MIRRORING","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_NFS":{"name":"FEATURE_TOGGLE_NFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RBD":{"name":"FEATURE_TOGGLE_RBD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RGW":{"name":"FEATURE_TOGGLE_RGW","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE":{"name":"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_PASSWORD":{"name":"GRAFANA_API_PASSWORD","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_SSL_VERIFY":{"name":"GRAFANA_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_URL":{"name":"GRAFANA_API_URL","type":"str","level":"advanced","flags":0
,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_USERNAME":{"name":"GRAFANA_API_USERNAME","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_FRONTEND_API_URL":{"name":"GRAFANA_FRONTEND_API_URL","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_UPDATE_DASHBOARDS":{"name":"GRAFANA_UPDATE_DASHBOARDS","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISCSI_API_SSL_VERIFICATION":{"name":"ISCSI_API_SSL_VERIFICATION","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISSUE_TRACKER_API_KEY":{"name":"ISSUE_TRACKER_API_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MANAGED_BY_CLUSTERS":{"name":"MANAGED_BY_CLUSTERS","type":"str","level":"advanced","flags":0,"default_value":"[]","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MULTICLUSTER_CONFIG":{"name":"MULTICLUSTER_CONFIG","type":"str","level":"advanced","flags":0,"default_value":"{}","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_HOST":{"name":"PROMETHEUS_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_SSL_VERIFY":{"name":"PROMETHEUS_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_COMPLEXITY_ENABLED":{"name":"PWD_POLICY_CHECK_COMPLEXITY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED":{"name":"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_LENGTH_ENABLED":{"name":"PWD_POLICY_CHECK_LENGTH_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_OLDPWD_ENABLED":{"name":"PWD_POLICY_CHECK_OLDPWD_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_USERNAME_ENABLED":{"name":"PWD_POLICY_CHECK_USERNAME_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"Fal
se","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_ENABLED":{"name":"PWD_POLICY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_EXCLUSION_LIST":{"name":"PWD_POLICY_EXCLUSION_LIST","type":"str","level":"advanced","flags":0,"default_value":"osd,host,dashboard,pool,block,nfs,ceph,monitors,gateway,logs,crush,maps","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_COMPLEXITY":{"name":"PWD_POLICY_MIN_COMPLEXITY","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_LENGTH":{"name":"PWD_POLICY_MIN_LENGTH","type":"int","level":"advanced","flags":0,"default_value":"8","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"REST_REQUESTS_TIMEOUT":{"name":"REST_REQUESTS_TIMEOUT","type":"int","level":"advanced","flags":0,"default_value":"45","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ACCESS_KEY":{"name":"RGW_API_ACCESS_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ADMIN_RESOURCE":{"name":"RGW_API_ADMIN_RESOURCE","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SECRET_KEY":{"name":"RGW_API_SECRET_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SSL_VERIFY":{"name":"RGW_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_HOSTNAME_PER_DAEMON":{"name":"RGW_HOSTNAME_PER_DAEMON","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"UNSAFE_TLS_v1_2":{"name":"UNSAFE_TLS_v1_2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_SPAN":{"name":"USER_PWD_EXPIRATION_SPAN","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_1":{"name":"USER_PWD_EXPIRATION_WARNING_1","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_2":{"name":"USER_PWD_EXPIRATION_WARNING_2","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"cross_origin_url":{"name":"cross_origin_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"crt_file":{"name":"crt_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"debug":{"name":"debug","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"
desc":"Enable/disable debug options","long_desc":"","tags":[],"see_also":[]},"jwt_token_ttl":{"name":"jwt_token_ttl","type":"int","level":"advanced","flags":0,"default_value":"28800","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"key_file":{"name":"key_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"motd":{"name":"motd","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"The message of the day","long_desc":"","tags":[],"see_also":[]},"redirect_resolve_ip_addr":{"name":"redirect_resolve_ip_addr","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":0,"default_value":"8080","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl_server_port":{"name":"ssl_server_port","type":"int","level":"advanced","flags":0,"default_value":"8443","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sso_oauth2":{"name":"sso_oauth2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":0,"default_value":"redirect","min":"","max":"","enum_allowed":["error","redirect"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":0,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url_prefix":{"name":"url_prefix","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"devicehealth","can_run":true,"error_string":"","module_options":{"enable_monitoring":{"name":
"enable_monitoring","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"monitor device health metrics","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mark_out_threshold":{"name":"mark_out_threshold","type":"secs","level":"advanced","flags":1,"default_value":"2419200","min":"","max":"","enum_allowed":[],"desc":"automatically mark OSD if it may fail before this long","long_desc":"","tags":[],"see_also":[]},"pool_name":{"name":"pool_name","type":"str","level":"advanced","flags":1,"default_value":"device_health_metrics","min":"","max":"","enum_allowed":[],"desc":"name of pool in which to store device health metrics","long_desc":"","tags":[],"see_also":[]},"retention_period":{"name":"retention_period","type":"secs","level":"advanced","flags":1,"default_value":"15552000","min":"","max":"","enum_allowed":[],"desc":"how long to retain device health metrics","long_desc":"","tags":[],"see_also":[]},"scrape_frequency":{"name":"scrape_frequency","type":"secs","level":"advanced","flags":1,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"how frequently to scrape device health metrics","long_desc":"","tags":[],"see_also":[]},"self_heal":{"name":"self_heal","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"preemptively heal cluster around devices that may fail","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and check device health","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_threshold":{"name":"warn_threshold","type":"secs","level":"advanced","flags":1,"default_value":"7257600","min":"","max":"","enum_allowed":[],"desc":"raise health warning if OSD may fail before this 
long","long_desc":"","tags":[],"see_also":[]}}},{"name":"diskprediction_local","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predict_interval":{"name":"predict_interval","type":"str","level":"advanced","flags":0,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predictor_model":{"name":"predictor_model","type":"str","level":"advanced","flags":0,"default_value":"prophetstor","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"str","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"influx","can_run":true,"error_string":"","module_options":{"batch_size":{"name":"batch_size","type":"int","level":"advanced","flags":0,"default_value":"5000","min":"","max":"","enum_allowed":[],"desc":"How big batches of data points should be when sending to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"database":{"name":"database","type":"str","level":"advanced","flags":0,"default_value":"ceph","min":"","max":"","enum_allowed":[],"desc":"InfluxDB database name. You will need to create this database and grant write privileges to the configured username or the username must have admin privileges to create it.","long_desc":"","tags":[],"see_also":[]},"hostname":{"name":"hostname","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server hostname","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"30","min":"5","max":"","enum_allowed":[],"desc":"Time between reports to InfluxDB. 
Default 30 seconds.","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"password":{"name":"password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"password of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"port":{"name":"port","type":"int","level":"advanced","flags":0,"default_value":"8086","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"str","level":"advanced","flags":0,"default_value":"false","min":"","max":"","enum_allowed":[],"desc":"Use https connection for InfluxDB server. Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]},"threads":{"name":"threads","type":"int","level":"advanced","flags":0,"default_value":"5","min":"1","max":"32","enum_allowed":[],"desc":"How many worker threads should be spawned for sending data to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"username":{"name":"username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"username of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"verify_ssl":{"name":"verify_ssl","type":"str","level":"advanced","flags":0,"default_value":"true","min":"","max":"","enum_allowed":[],"desc":"Verify https cert for InfluxDB server. 
Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]}}},{"name":"insights","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"iostat","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"k8sevents","can_run":true,"error_string":"","module_options":{"ceph_event_retention_days":{"name":"ceph_event_retention_days","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"Days to hold ceph event information within local cache","long_desc":"","tags":[],"see_also":[]},"config_check_secs":{"name":"config_check_secs","type":"int","level":"advanced","flags":0,"default_value":"10","min":"10","max":"","enum_allowed":[],"desc":"interval (secs) to check for cluster configuration 
changes","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"localpool","can_run":true,"error_string":"","module_options":{"failure_domain":{"name":"failure_domain","type":"str","level":"advanced","flags":1,"default_value":"host","min":"","max":"","enum_allowed":[],"desc":"failure domain for any created local pool","long_desc":"what failure domain we should separate data replicas across.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_size":{"name":"min_size","type":"int","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"default min_size for any created local pool","long_desc":"value to set min_size to (unchanged from Ceph's default if this option is not set)","tags":[],"see_also":[]},"num_rep":{"name":"num_rep","type":"int","level":"advanced","flags":1,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"default replica count for any created local pool","long_desc":"","tags":[],"see_also":[]},"pg_num":{"name":"pg_num","type":"int","level":"advanced","flags":1,"default_value":"128","min":"","max":"","enum_allowed":[],"desc":"default pg_num for any created local pool","long_desc":"","tags":[],"see_also":[]},"prefix":{"name":"prefix","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"name prefix for any created local 
pool","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"subtree":{"name":"subtree","type":"str","level":"advanced","flags":1,"default_value":"rack","min":"","max":"","enum_allowed":[],"desc":"CRUSH level for which to create a local pool","long_desc":"which CRUSH subtree type the module should create a pool for.","tags":[],"see_also":[]}}},{"name":"mds_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"mirroring","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"nfs","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":
"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"orchestrator","can_run":true,"error_string":"","module_options":{"fail_fs":{"name":"fail_fs","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Fail filesystem for rapid multi-rank mds upgrade","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"orchestrator":{"name":"orchestrator","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["cephadm","rook","test_orchestrator"],"desc":"Orchestrator backend","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_perf_query","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[
]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"pg_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"threshold":{"name":"threshold","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"1.0","max":"","enum_allowed":[],"desc":"scaling threshold","long_desc":"The factor by which the `NEW PG_NUM` must vary from the current`PG_NUM` before being accepted. 
Cannot be less than 1.0","tags":[],"see_also":[]}}},{"name":"progress","can_run":true,"error_string":"","module_options":{"allow_pg_recovery_event":{"name":"allow_pg_recovery_event","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow the module to show pg recovery progress","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_completed_events":{"name":"max_completed_events","type":"int","level":"advanced","flags":1,"default_value":"50","min":"","max":"","enum_allowed":[],"desc":"number of past completed events to remember","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"how long the module is going to sleep","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"prometheus","can_run":true,"error_string":"","module_options":{"cache":{"name":"cache","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"exclude_perf_counters":{"name":"exclude_perf_counters","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Do not include perf-counters in the metrics output","long_desc":"Gathering perf-counters from a single Prometheus exporter can degrade ceph-mgr performance, especially in large clusters. Instead, Ceph-exporter daemons are now used by default for perf-counter gathering. 
This should only be disabled when no ceph-exporters are deployed.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools":{"name":"rbd_stats_pools","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools_refresh_interval":{"name":"rbd_stats_pools_refresh_interval","type":"int","level":"advanced","flags":0,"default_value":"300","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"scrape_interval":{"name":"scrape_interval","type":"float","level":"advanced","flags":0,"default_value":"15.0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"the IPv4 or IPv6 address on which the module listens for HTTP requests","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":1,"default_value":"9283","min":"","max":"","enum_allowed":[],"desc":"the port on which the module listens for HTTP 
requests","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"stale_cache_strategy":{"name":"stale_cache_strategy","type":"str","level":"advanced","flags":0,"default_value":"log","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":1,"default_value":"default","min":"","max":"","enum_allowed":["default","error"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":1,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rbd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_snap_create":{"name":"max_concurrent_snap_create","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mirror_snapshot_schedule":{"name":"mirror_snapshot_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"trash_purge_schedule":{"name":"trash_purge_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rgw","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_als
o":[]},"secondary_zone_period_retry_limit":{"name":"secondary_zone_period_retry_limit","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"RGW module period update retry limit for secondary site","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rook","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"prometheus_tls_secret_name":{"name":"prometheus_tls_secret_name","type":"str","level":"advanced","flags":0,"default_value":"rook-ceph-prometheus-server-tls","min":"","max":"","enum_allowed":[],"desc":"name of tls secret in k8s for prometheus","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"storage_class":{"name":"storage_class","type":"str","level":"advanced","flags":0,"default_value":"local","min":"","max":"","enum_allowed":[],"desc":"storage class name for LSO-discovered 
PVs","long_desc":"","tags":[],"see_also":[]}}},{"name":"selftest","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption1":{"name":"roption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption2":{"name":"roption2","type":"str","level":"advanced","flags":0,"default_value":"xyz","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption1":{"name":"rwoption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption2":{"name":"rwoption2","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption3":{"name":"rwoption3","type":"float","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption4":{"name":"rwoption4","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption5":{"name":"rwoption5","type":"bool","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption6":{"name":"rwoption6","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption7":{"name":"rwoption7","type":"int","level":"advanced","flags":0,"default_value":"","min":"1","max":"42","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testkey":{"name":"testkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testlkey":{"name":"testlkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testnewline":{"name":"testnewline","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"smb","can_run":true,"error_string":"","module_options":{"internal_store_backend":{"name":"internal_store_backend","type":"str","level":"dev","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"set 
internal store backend. for development and testing only","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_orchestration":{"name":"update_orchestration","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically update orchestration when smb resources are changed","long_desc":"","tags":[],"see_also":[]}}},{"name":"snap_schedule","can_run":true,"error_string":"","module_options":{"allow_m_granularity":{"name":"allow_m_granularity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow minute scheduled snapshots","long_desc":"","tags":[],"see_also":[]},"dump_on_update":{"name":"dump_on_update","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"dump database to debug log on 
update","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"stats","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"status","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telegraf","can_run":true,"error_string":"","module_options":{"address":{"name":"address","type":"str","level":"advanced","flags":0,"default_value":"unixgram:///tmp/telegraf.sock","min":"","max":"","enum_a
llowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"15","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telemetry","can_run":true,"error_string":"","module_options":{"channel_basic":{"name":"channel_basic","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share basic cluster information (size, version)","long_desc":"","tags":[],"see_also":[]},"channel_crash":{"name":"channel_crash","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share metadata about Ceph daemon crashes (version, stack straces, etc)","long_desc":"","tags":[],"see_also":[]},"channel_device":{"name":"channel_device","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share device health metrics (e.g., SMART data, minus potentially identifying info like serial numbers)","long_desc":"","tags":[],"see_also":[]},"channel_ident":{"name":"channel_ident","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share a user-provided description and/or contact email for the cluster","long_desc":"","tags":[],"see_also":[]},"channel_perf":{"name":"channel_perf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share various performance metrics of a 
cluster","long_desc":"","tags":[],"see_also":[]},"contact":{"name":"contact","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"description":{"name":"description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"device_url":{"name":"device_url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/device","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"int","level":"advanced","flags":0,"default_value":"24","min":"8","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"last_opt_revision":{"name":"last_opt_revision","type":"int","level":"advanced","flags":0,"default_value":"1","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard":{"name":"leaderboard","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard_description":{"name":"leaderboard_description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"organization":{"name":"organization","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"proxy":{"name":"proxy","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url":{"name":"url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/report","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"test_orchestrator","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"adv
anced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"volumes","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_clones":{"name":"max_concurrent_clones","type":"int","level":"advanced","flags":0,"default_value":"4","min":"","max":"","enum_allowed":[],"desc":"Number of asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_cloning":{"name":"pause_cloning","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_purging":{"name":"pause_purging","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous subvolume purge threads","long_desc":"","tags":[],"see_also":[]},"periodic_async_work":{"name":"periodic_async_work","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Periodically check for async work","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_delay":{"name":"snapshot_clone_delay","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"Delay clone begin operation by snapshot_clone_delay seconds","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_no_wait":{"name":"snapshot_clone_no_wait","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Reject subvolume clone request when cloner threads are 
busy","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}}],"services":{},"always_on_modules":{"octopus":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"pacific":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"quincy":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"reef":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"squid":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"tentacle":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"]},"force_disabled_modules":{},"last_failure_osd_epoch":0,"active_clients":[{"name":"devicehealth","addrvec":[{"type":"v2","addr":"192.168.123.106:0","nonce":2539870222}]},{"name":"libcephsqlite","addrvec":[{"type":"v2","addr":"192.168.123.106:0","nonce":2001544766}]},{"name":"rbd_support","addrvec":[{"type":"v2","addr":"192.168.123.106:0","nonce":864026780}]},{"name":"volumes","addrvec":[{"type":"v2","addr":"192.168.123.106:0","nonce":1064122690}]}]} 2026-04-01T02:24:37.817 INFO:tasks.ceph.ceph_manager.ceph:mgr available! 2026-04-01T02:24:37.817 INFO:tasks.ceph.ceph_manager.ceph:waiting for all up 2026-04-01T02:24:37.817 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd dump --format=json 2026-04-01T02:24:38.006 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:38.006 
INFO:teuthology.orchestra.run.vm03.stdout:{"epoch":17,"fsid":"1338c6ab-9330-4cd3-91bf-71d5668f30ea","created":"2026-04-01T02:24:22.859889+0000","modified":"2026-04-01T02:24:37.318345+0000","last_up_change":"2026-04-01T02:24:33.279665+0000","last_in_change":"2026-04-01T02:24:25.426214+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":4,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":2,"max_osd":8,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[{"pool":1,"pool_name":"rbd","create_time":"2026-04-01T02:24:33.872129+0000","flags":8193,"flags_names":"hashpspool,selfmanaged_snaps","type":1,"size":2,"min_size":1,"crush_rule":0,"peering_crush_bucket_count":0,"peering_crush_bucket_target":0,"peering_crush_bucket_barrier":0,"peering_crush_bucket_mandatory_member":2147483647,"is_stretch_pool":false,"object_hash":2,"pg_autoscale_mode":"off","pg_num":8,"pg_placement_num":8,"pg_placement_num_target":8,"pg_num_target":8,"pg_num_pending":8,"last_pg_merge_meta":{"source_pgid":"0.0","ready_epoch":0,"last_epoch_started":0,"last_epoch_clean":0,"source_version":"0'0","target_version":"0'0"},"last_change":"17","last_force_op_resend":"0","last_force_op_resend_prenautilus":"0","last_force_op_resend_preluminous":"0","auid":0,"snap_mode":"selfmanaged","snap_seq":2,"snap_epoch":17,"pool_snaps":[],"removed_snaps":"[]","quota_max_bytes":0,"quota_max_objects":0,"tiers":[],"tier_of":-1,"read_tier":-1,"write_tier":-1,"cache_mode":"none","target_max_bytes":0,"target_max_objects":0,"cache_target_dirty_ratio_micro":400000,"cache_target_dirty_high_ratio_micro":600000,"cache_target_full_ratio_micro":800000,"cache_min_flush_age":0,"cache_min_evict_age":0,"erasure_code_profile":"","hit_set_params":{"type":"none"},"hit_set_period":0,"hit_set_count":0,"use_gmt_hitset":true,"min_read_recency_for_promote":0,"min_write_recency_for_promote":0,"hit_set_grade_decay_rate":0,"hit_set_search_last_n":0,"grade_table":[],"stripe_width":0,"expected_num_objects":0,"fast_read":false,"nonprimary_shards":"{}","options":{},"application_metadata":{"rbd":{}},"read_balance":{"score_type":"Fair 
distribution","score_acting":2,"score_stable":2,"optimal_score":1,"raw_score_acting":2,"raw_score_stable":2,"primary_affinity_weighted":1,"average_primary_affinity":1,"average_primary_affinity_weighted":1}},{"pool":2,"pool_name":".mgr","create_time":"2026-04-01T02:24:35.264105+0000","flags":1,"flags_names":"hashpspool","type":1,"size":2,"min_size":1,"crush_rule":0,"peering_crush_bucket_count":0,"peering_crush_bucket_target":0,"peering_crush_bucket_barrier":0,"peering_crush_bucket_mandatory_member":2147483647,"is_stretch_pool":false,"object_hash":2,"pg_autoscale_mode":"off","pg_num":1,"pg_placement_num":1,"pg_placement_num_target":1,"pg_num_target":1,"pg_num_pending":1,"last_pg_merge_meta":{"source_pgid":"0.0","ready_epoch":0,"last_epoch_started":0,"last_epoch_clean":0,"source_version":"0'0","target_version":"0'0"},"last_change":"17","last_force_op_resend":"0","last_force_op_resend_prenautilus":"0","last_force_op_resend_preluminous":"0","auid":0,"snap_mode":"selfmanaged","snap_seq":0,"snap_epoch":0,"pool_snaps":[],"removed_snaps":"[]","quota_max_bytes":0,"quota_max_objects":0,"tiers":[],"tier_of":-1,"read_tier":-1,"write_tier":-1,"cache_mode":"none","target_max_bytes":0,"target_max_objects":0,"cache_target_dirty_ratio_micro":400000,"cache_target_dirty_high_ratio_micro":600000,"cache_target_full_ratio_micro":800000,"cache_min_flush_age":0,"cache_min_evict_age":0,"erasure_code_profile":"","hit_set_params":{"type":"none"},"hit_set_period":0,"hit_set_count":0,"use_gmt_hitset":true,"min_read_recency_for_promote":0,"min_write_recency_for_promote":0,"hit_set_grade_decay_rate":0,"hit_set_search_last_n":0,"grade_table":[],"stripe_width":0,"expected_num_objects":0,"fast_read":false,"nonprimary_shards":"{}","options":{"pg_num_max":32,"pg_num_min":1},"application_metadata":{"mgr":{}},"read_balance":{"score_type":"Fair 
distribution","score_acting":8,"score_stable":8,"optimal_score":0.25,"raw_score_acting":2,"raw_score_stable":2,"primary_affinity_weighted":1,"average_primary_affinity":1,"average_primary_affinity_weighted":1}}],"osds":[{"osd":0,"uuid":"89e72097-68d3-4917-9d63-d02f1bf68f31","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6823","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6825","nonce":1019300300}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6826","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6827","nonce":1019300300}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6830","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6831","nonce":1019300300}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6828","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6829","nonce":1019300300}]},"public_addr":"192.168.123.103:6825/1019300300","cluster_addr":"192.168.123.103:6827/1019300300","heartbeat_back_addr":"192.168.123.103:6831/1019300300","heartbeat_front_addr":"192.168.123.103:6829/1019300300","state":["exists","up"]},{"osd":1,"uuid":"c09bed04-11ca-413a-b62a-7dbf885b020a","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6808","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6809","nonce":752174647}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6810","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6811","nonce":752174647}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6814","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6815","nonce":752174647}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6812","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6813","nonce":752174647}]},"public_addr":"192.168.123.103:6809/752174647","cluster_addr":"192.168.123.103:6811/752174647","heartbeat_back_addr":"192.168.123.103:6815/752174647","heartbeat_front_addr":"192.168.123.103:6813/752174647","state":["exists","up"]},{"osd":2,"uuid":"0e3dcff8-36fd-48b2-aced-989316025420","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6816","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6817","nonce":58483728}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6818","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6819","nonce":58483728}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6822","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6824","nonce":58483728}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6820","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6821","nonce":58483728}]},"public_addr":"192.168.123.103:6817/58483728","cluster_addr":"192.168.123.103:6819/58483728","heartbeat_back_addr":"192.168.123.103:6824/58483728","heartbeat_front_addr":"192.168.123.103:6821/58483728","state":["exists","up"]},{"osd":3,"uuid":"e32456e4-6faf-4dc1-822f-ef8a2ea3d6c2","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lo
st_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6800","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6801","nonce":3056993723}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6802","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6803","nonce":3056993723}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6806","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6807","nonce":3056993723}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6804","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6805","nonce":3056993723}]},"public_addr":"192.168.123.103:6801/3056993723","cluster_addr":"192.168.123.103:6803/3056993723","heartbeat_back_addr":"192.168.123.103:6807/3056993723","heartbeat_front_addr":"192.168.123.103:6805/3056993723","state":["exists","up"]},{"osd":4,"uuid":"f0f1f55b-af45-4403-bd31-41859f048b53","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6819","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6821","nonce":3132188684}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6823","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6825","nonce":3132188684}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6831","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6833","nonce":3132188684}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6827","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6829","nonce":3132188684}]},"public_addr":"192.168.123.106:6821/3132188684","cluster_addr":"192.168.123.106:6825/3132188684","heartbeat_back_addr":"192.168.123.106:6833/3132188684","heartbeat_front_addr":"192.168.123.106:6829/3132188684","state":["exists","up"]},{"osd":5,"uuid":"d88b8f6d-010d-4f77-8ded-f8a9fd351fe7","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6818","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6820","nonce":2357546250}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6822","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6824","nonce":2357546250}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6830","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6832","nonce":2357546250}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6826","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6828","nonce":2357546250}]},"public_addr":"192.168.123.106:6820/2357546250","cluster_addr":"192.168.123.106:6824/2357546250","heartbeat_back_addr":"192.168.123.106:6832/2357546250","heartbeat_front_addr":"192.168.123.106:6828/2357546250","state":["exists","up"]},{"osd":6,"uuid":"1c881e96-364e-4f1e-a73e-9b75d1db46dd","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6802","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6803","nonce":2938966295}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6804","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6805","nonce":2938966295}]},"heartbeat_back_addrs":{"addrvec":[{"type":
"v2","addr":"192.168.123.106:6808","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6809","nonce":2938966295}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6806","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6807","nonce":2938966295}]},"public_addr":"192.168.123.106:6803/2938966295","cluster_addr":"192.168.123.106:6805/2938966295","heartbeat_back_addr":"192.168.123.106:6809/2938966295","heartbeat_front_addr":"192.168.123.106:6807/2938966295","state":["exists","up"]},{"osd":7,"uuid":"6c76eb32-2791-48e0-a9ca-205cacde72bc","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":15,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6810","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6811","nonce":4085424980}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6812","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6813","nonce":4085424980}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6816","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6817","nonce":4085424980}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6814","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6815","nonce":4085424980}]},"public_addr":"192.168.123.106:6811/4085424980","cluster_addr":"192.168.123.106:6813/4085424980","heartbeat_back_addr":"192.168.123.106:6817/4085424980","heartbeat_front_addr":"192.168.123.106:6815/4085424980","state":["exists","up"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:27.094931+0000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:26.960504+0000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:27.117935+0000","dead_epoch":0},{"osd":3,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:27.002489+0000","dead_epoch":0},{"osd":4,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":5,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":6,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":7,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[{"pool":1,"snaps":[{"begin":2,"length":1}]}],"new_removed_snaps":[{"pool":1,"snaps":[{"begin":2,"length":1}]}],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode
_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-04-01T02:24:38.016 INFO:tasks.ceph.ceph_manager.ceph:all up! 2026-04-01T02:24:38.016 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd dump --format=json 2026-04-01T02:24:38.207 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:38.207 INFO:teuthology.orchestra.run.vm03.stdout:{"epoch":17,"fsid":"1338c6ab-9330-4cd3-91bf-71d5668f30ea","created":"2026-04-01T02:24:22.859889+0000","modified":"2026-04-01T02:24:37.318345+0000","last_up_change":"2026-04-01T02:24:33.279665+0000","last_in_change":"2026-04-01T02:24:25.426214+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":4,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":2,"max_osd":8,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[{"pool":1,"pool_name":"rbd","create_time":"2026-04-01T02:24:33.872129+0000","flags":8193,"flags_names":"hashpspool,selfmanaged_snaps","type":1,"size":2,"min_size":1,"crush_rule":0,"peering_crush_bucket_count":0,"peering_crush_bucket_target":0,"peering_crush_bucket_barrier":0,"peering_crush_bucket_mandatory_member":2147483647,"is_stretch_pool":false,"object_hash":2,"pg_autoscale_mode":"off","pg_num":8,"pg_placement_num":8,"pg_placement_num_target":8,"pg_num_target":8,"pg_num_pending":8,"last_pg_merge_meta":{"source_pgid":"0.0","ready_epoch":0,"last_epoch_started":0,"last_epoch_clean":0,"source_version":"0'0","target_version":"0'0"},"last_change":"17","last_force_op_resend":"0","last_force_op_resend_prenautilus":"0","last_force_op_resend_preluminous":"0","auid":0,"snap_mode":"selfmanaged","snap_seq":2,"snap_epoch":17,"pool_snaps":[],"removed_snaps":"[]","quota_max_bytes":0,"quota_max_objects":0,"tiers":[],"tier_of":-1,"read_tier":-1,"write_tier":-1,"cache_mode":"none","target_max_bytes":0,"target_max_objects":0,"cache_target_dirty_ratio_micro":400000,"cache_target_dirty_high_ratio_micro":600000,"cache_target_full_ratio_micro":800000,"cache_min_flush_age":0,"cache_min_evict_age":0,"erasure_code_profile":"","hit_set_params":{"type":"none"},"hit_set_period":0,"hit_set_count":0,"use_gmt_hitset":true,"min_read_recency_for_promote":0,"min_write_recency_for_promote":0,"hit_set_grade_decay_rate":0,"hit_set_search_last_n":0,"grade_table":[],"stripe_width":0,"expected_num_objects":0,"fast_read":false,"nonprimary_shards":"{}","options":{},"application_metadata":{"rbd":{}},"read_balance":{"score_type":"Fair 
distribution","score_acting":2,"score_stable":2,"optimal_score":1,"raw_score_acting":2,"raw_score_stable":2,"primary_affinity_weighted":1,"average_primary_affinity":1,"average_primary_affinity_weighted":1}},{"pool":2,"pool_name":".mgr","create_time":"2026-04-01T02:24:35.264105+0000","flags":1,"flags_names":"hashpspool","type":1,"size":2,"min_size":1,"crush_rule":0,"peering_crush_bucket_count":0,"peering_crush_bucket_target":0,"peering_crush_bucket_barrier":0,"peering_crush_bucket_mandatory_member":2147483647,"is_stretch_pool":false,"object_hash":2,"pg_autoscale_mode":"off","pg_num":1,"pg_placement_num":1,"pg_placement_num_target":1,"pg_num_target":1,"pg_num_pending":1,"last_pg_merge_meta":{"source_pgid":"0.0","ready_epoch":0,"last_epoch_started":0,"last_epoch_clean":0,"source_version":"0'0","target_version":"0'0"},"last_change":"17","last_force_op_resend":"0","last_force_op_resend_prenautilus":"0","last_force_op_resend_preluminous":"0","auid":0,"snap_mode":"selfmanaged","snap_seq":0,"snap_epoch":0,"pool_snaps":[],"removed_snaps":"[]","quota_max_bytes":0,"quota_max_objects":0,"tiers":[],"tier_of":-1,"read_tier":-1,"write_tier":-1,"cache_mode":"none","target_max_bytes":0,"target_max_objects":0,"cache_target_dirty_ratio_micro":400000,"cache_target_dirty_high_ratio_micro":600000,"cache_target_full_ratio_micro":800000,"cache_min_flush_age":0,"cache_min_evict_age":0,"erasure_code_profile":"","hit_set_params":{"type":"none"},"hit_set_period":0,"hit_set_count":0,"use_gmt_hitset":true,"min_read_recency_for_promote":0,"min_write_recency_for_promote":0,"hit_set_grade_decay_rate":0,"hit_set_search_last_n":0,"grade_table":[],"stripe_width":0,"expected_num_objects":0,"fast_read":false,"nonprimary_shards":"{}","options":{"pg_num_max":32,"pg_num_min":1},"application_metadata":{"mgr":{}},"read_balance":{"score_type":"Fair 
distribution","score_acting":8,"score_stable":8,"optimal_score":0.25,"raw_score_acting":2,"raw_score_stable":2,"primary_affinity_weighted":1,"average_primary_affinity":1,"average_primary_affinity_weighted":1}}],"osds":[{"osd":0,"uuid":"89e72097-68d3-4917-9d63-d02f1bf68f31","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6823","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6825","nonce":1019300300}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6826","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6827","nonce":1019300300}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6830","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6831","nonce":1019300300}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6828","nonce":1019300300},{"type":"v1","addr":"192.168.123.103:6829","nonce":1019300300}]},"public_addr":"192.168.123.103:6825/1019300300","cluster_addr":"192.168.123.103:6827/1019300300","heartbeat_back_addr":"192.168.123.103:6831/1019300300","heartbeat_front_addr":"192.168.123.103:6829/1019300300","state":["exists","up"]},{"osd":1,"uuid":"c09bed04-11ca-413a-b62a-7dbf885b020a","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6808","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6809","nonce":752174647}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6810","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6811","nonce":752174647}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6814","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6815","nonce":752174647}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6812","nonce":752174647},{"type":"v1","addr":"192.168.123.103:6813","nonce":752174647}]},"public_addr":"192.168.123.103:6809/752174647","cluster_addr":"192.168.123.103:6811/752174647","heartbeat_back_addr":"192.168.123.103:6815/752174647","heartbeat_front_addr":"192.168.123.103:6813/752174647","state":["exists","up"]},{"osd":2,"uuid":"0e3dcff8-36fd-48b2-aced-989316025420","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6816","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6817","nonce":58483728}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6818","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6819","nonce":58483728}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6822","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6824","nonce":58483728}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6820","nonce":58483728},{"type":"v1","addr":"192.168.123.103:6821","nonce":58483728}]},"public_addr":"192.168.123.103:6817/58483728","cluster_addr":"192.168.123.103:6819/58483728","heartbeat_back_addr":"192.168.123.103:6824/58483728","heartbeat_front_addr":"192.168.123.103:6821/58483728","state":["exists","up"]},{"osd":3,"uuid":"e32456e4-6faf-4dc1-822f-ef8a2ea3d6c2","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lo
st_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6800","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6801","nonce":3056993723}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6802","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6803","nonce":3056993723}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6806","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6807","nonce":3056993723}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.103:6804","nonce":3056993723},{"type":"v1","addr":"192.168.123.103:6805","nonce":3056993723}]},"public_addr":"192.168.123.103:6801/3056993723","cluster_addr":"192.168.123.103:6803/3056993723","heartbeat_back_addr":"192.168.123.103:6807/3056993723","heartbeat_front_addr":"192.168.123.103:6805/3056993723","state":["exists","up"]},{"osd":4,"uuid":"f0f1f55b-af45-4403-bd31-41859f048b53","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6819","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6821","nonce":3132188684}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6823","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6825","nonce":3132188684}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6831","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6833","nonce":3132188684}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6827","nonce":3132188684},{"type":"v1","addr":"192.168.123.106:6829","nonce":3132188684}]},"public_addr":"192.168.123.106:6821/3132188684","cluster_addr":"192.168.123.106:6825/3132188684","heartbeat_back_addr":"192.168.123.106:6833/3132188684","heartbeat_front_addr":"192.168.123.106:6829/3132188684","state":["exists","up"]},{"osd":5,"uuid":"d88b8f6d-010d-4f77-8ded-f8a9fd351fe7","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6818","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6820","nonce":2357546250}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6822","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6824","nonce":2357546250}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6830","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6832","nonce":2357546250}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6826","nonce":2357546250},{"type":"v1","addr":"192.168.123.106:6828","nonce":2357546250}]},"public_addr":"192.168.123.106:6820/2357546250","cluster_addr":"192.168.123.106:6824/2357546250","heartbeat_back_addr":"192.168.123.106:6832/2357546250","heartbeat_front_addr":"192.168.123.106:6828/2357546250","state":["exists","up"]},{"osd":6,"uuid":"1c881e96-364e-4f1e-a73e-9b75d1db46dd","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":14,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6802","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6803","nonce":2938966295}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6804","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6805","nonce":2938966295}]},"heartbeat_back_addrs":{"addrvec":[{"type":
"v2","addr":"192.168.123.106:6808","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6809","nonce":2938966295}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6806","nonce":2938966295},{"type":"v1","addr":"192.168.123.106:6807","nonce":2938966295}]},"public_addr":"192.168.123.106:6803/2938966295","cluster_addr":"192.168.123.106:6805/2938966295","heartbeat_back_addr":"192.168.123.106:6809/2938966295","heartbeat_front_addr":"192.168.123.106:6807/2938966295","state":["exists","up"]},{"osd":7,"uuid":"6c76eb32-2791-48e0-a9ca-205cacde72bc","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":15,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6810","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6811","nonce":4085424980}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6812","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6813","nonce":4085424980}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6816","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6817","nonce":4085424980}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.106:6814","nonce":4085424980},{"type":"v1","addr":"192.168.123.106:6815","nonce":4085424980}]},"public_addr":"192.168.123.106:6811/4085424980","cluster_addr":"192.168.123.106:6813/4085424980","heartbeat_back_addr":"192.168.123.106:6817/4085424980","heartbeat_front_addr":"192.168.123.106:6815/4085424980","state":["exists","up"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:27.094931+0000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:26.960504+0000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:27.117935+0000","dead_epoch":0},{"osd":3,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-04-01T02:24:27.002489+0000","dead_epoch":0},{"osd":4,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":5,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":6,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0},{"osd":7,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"0.000000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[{"pool":1,"snaps":[{"begin":2,"length":1}]}],"new_removed_snaps":[{"pool":1,"snaps":[{"begin":2,"length":1}]}],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode
_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-04-01T02:24:38.219 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.0 flush_pg_stats 2026-04-01T02:24:38.219 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.1 flush_pg_stats 2026-04-01T02:24:38.219 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.2 flush_pg_stats 2026-04-01T02:24:38.219 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.3 flush_pg_stats 2026-04-01T02:24:38.220 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.4 flush_pg_stats 2026-04-01T02:24:38.220 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.5 flush_pg_stats 2026-04-01T02:24:38.220 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.6 flush_pg_stats 2026-04-01T02:24:38.220 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph tell osd.7 flush_pg_stats 2026-04-01T02:24:38.414 INFO:teuthology.orchestra.run.vm03.stdout:55834574850 2026-04-01T02:24:38.414 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.2 2026-04-01T02:24:38.421 INFO:teuthology.orchestra.run.vm03.stdout:55834574850 2026-04-01T02:24:38.421 INFO:teuthology.orchestra.run.vm03.stdout:55834574852 2026-04-01T02:24:38.422 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.6 2026-04-01T02:24:38.422 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.5 2026-04-01T02:24:38.428 INFO:teuthology.orchestra.run.vm03.stdout:55834574852 2026-04-01T02:24:38.432 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.4 2026-04-01T02:24:38.433 INFO:teuthology.orchestra.run.vm03.stdout:55834574850 2026-04-01T02:24:38.433 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.7 2026-04-01T02:24:38.440 INFO:teuthology.orchestra.run.vm03.stdout:55834574852 2026-04-01T02:24:38.440 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.0 2026-04-01T02:24:38.447 INFO:teuthology.orchestra.run.vm03.stdout:55834574851 2026-04-01T02:24:38.447 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.1 
2026-04-01T02:24:38.457 INFO:teuthology.orchestra.run.vm03.stdout:55834574852
2026-04-01T02:24:38.457 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.3
2026-04-01T02:24:38.743 INFO:teuthology.orchestra.run.vm03.stdout:55834574849
2026-04-01T02:24:38.765 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574850 got 55834574849 for osd.7
2026-04-01T02:24:38.816 INFO:teuthology.orchestra.run.vm03.stdout:55834574851
2026-04-01T02:24:38.841 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574851 for osd.6
2026-04-01T02:24:38.843 INFO:teuthology.orchestra.run.vm03.stdout:55834574851
2026-04-01T02:24:38.846 INFO:teuthology.orchestra.run.vm03.stdout:55834574849
2026-04-01T02:24:38.866 INFO:teuthology.orchestra.run.vm03.stdout:55834574849
2026-04-01T02:24:38.867 INFO:teuthology.orchestra.run.vm03.stdout:55834574851
2026-04-01T02:24:38.868 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574851 for osd.0
2026-04-01T02:24:38.868 INFO:teuthology.orchestra.run.vm03.stdout:55834574850
2026-04-01T02:24:38.879 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574850 got 55834574849 for osd.5
2026-04-01T02:24:38.882 INFO:teuthology.orchestra.run.vm03.stdout:55834574851
2026-04-01T02:24:38.889 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574851 for osd.3
2026-04-01T02:24:38.889 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574850 got 55834574849 for osd.2
2026-04-01T02:24:38.891 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574851 got 55834574850 for osd.1
2026-04-01T02:24:38.896 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574851 for osd.4
2026-04-01T02:24:39.766 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.7
2026-04-01T02:24:39.841 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.6
2026-04-01T02:24:39.869 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.0
2026-04-01T02:24:39.880 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.5
2026-04-01T02:24:39.889 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.3
2026-04-01T02:24:39.889 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.2
2026-04-01T02:24:39.893 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.1
2026-04-01T02:24:39.897 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph osd last-stat-seq osd.4
2026-04-01T02:24:40.013 INFO:teuthology.orchestra.run.vm03.stdout:55834574851
2026-04-01T02:24:40.037 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574850 got 55834574851 for osd.7
2026-04-01T02:24:40.037 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.147 INFO:teuthology.orchestra.run.vm03.stdout:55834574853
2026-04-01T02:24:40.165 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574853 for osd.6
2026-04-01T02:24:40.165 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.168 INFO:teuthology.orchestra.run.vm03.stdout:55834574851
2026-04-01T02:24:40.194 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574850 got 55834574851 for osd.5
2026-04-01T02:24:40.194 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.247 INFO:teuthology.orchestra.run.vm03.stdout:55834574851
2026-04-01T02:24:40.249 INFO:teuthology.orchestra.run.vm03.stdout:55834574852
2026-04-01T02:24:40.264 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574851 got 55834574852 for osd.1
2026-04-01T02:24:40.264 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.269 INFO:teuthology.orchestra.run.vm03.stdout:55834574853
2026-04-01T02:24:40.269 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574850 got 55834574851 for osd.2
2026-04-01T02:24:40.269 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.277 INFO:teuthology.orchestra.run.vm03.stdout:55834574853
2026-04-01T02:24:40.280 INFO:teuthology.orchestra.run.vm03.stdout:55834574853
2026-04-01T02:24:40.281 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574853 for osd.4
2026-04-01T02:24:40.281 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.290 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574853 for osd.3
2026-04-01T02:24:40.290 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.294 INFO:tasks.ceph.ceph_manager.ceph:need seq 55834574852 got 55834574853 for osd.0
2026-04-01T02:24:40.294 DEBUG:teuthology.parallel:result is None
2026-04-01T02:24:40.294 INFO:tasks.ceph.ceph_manager.ceph:waiting for clean
2026-04-01T02:24:40.294 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json
2026-04-01T02:24:40.523 INFO:teuthology.orchestra.run.vm03.stdout:
2026-04-01T02:24:40.523 INFO:teuthology.orchestra.run.vm03.stderr:dumped all
2026-04-01T02:24:40.534
INFO:teuthology.orchestra.run.vm03.stdout:{"pg_ready":true,"pg_map":{"version":15,"stamp":"2026-04-01T02:24:39.245429+0000","last_osdmap_epoch":0,"last_pg_scan":0,"pg_stats_sum":{"stat_sum":{"num_bytes":459299,"num_objects":4,"num_object_clones":0,"num_object_copies":8,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":4,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":59,"num_write_kb":586,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":35,"ondisk_log_size":35,"up":18,"acting":18,"num_store_stats":0},"osd_stats_sum":{"up_from":0,"seq":0,"num_pgs":17,"num_osds":8,"num_per_pool_osds":8,"num_per_pool_omap_osds":8,"kb":754974720,"kb_used":1036488,"kb_used_data":2188,"kb_used_omap":50,"kb_used_meta":214477,"kb_avail":753938232,"statfs":{"total":773094113280,"available":772032749568,"internally_reserved":0,"allocated":2240512,"data_stored":1323703,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":51336,"internal_metadata":219625336},"hb_peers":[],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[],"network_ping_times":[]},"pg_stats_delta":{"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":0,"ondisk_log_size":0,"up":0,"acting":0,"num_store_stats":0,"stamp_delta":"0.000000"},"pg_stats":[{"pgid":"1.7","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.317123+0000","last_change":"2026-04-01T02:24:38.317123+0000","last_active":"2026-
04-01T02:24:38.317123+0000","last_peered":"2026-04-01T02:24:38.317123+0000","last_clean":"2026-04-01T02:24:38.317123+0000","last_became_active":"2026-04-01T02:24:35.317959+0000","last_became_peered":"2026-04-01T02:24:35.317959+0000","last_unstale":"2026-04-01T02:24:38.317123+0000","last_undegraded":"2026-04-01T02:24:38.317123+0000","last_fullsized":"2026-04-01T02:24:38.317123+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 2026-04-02T03:30:24.543101+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00042498600000000003,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[6,3],"acting":[6,3],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":6,"acting_primary":6,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.6","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.317093+0000","last_change":"2026-04-01T02:24:38.317093+0000","last_active":"2026-04-01T02:24:38.317093+0000","last_peered":"2026-04-01T02:24:38.317093+0000","last_clean":"2026-04-01T02:24:38.317093+0000","last_became_active":"2026-04-01T02:24:36.005924+0000","last_became_peered":"2026-04-01T02:24:36.005924+0000","last_unstale":"2026-04-01T02:24:38.317093+0000","last_undegraded":"2026-04-01T02:24:38.317093+0000","last_fullsized":"2026-04-01T02:24:38.317093+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled 
@ 2026-04-02T06:41:20.848096+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00040274400000000002,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[6,0],"acting":[6,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":6,"acting_primary":6,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.5","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.386432+0000","last_change":"2026-04-01T02:24:38.386432+0000","last_active":"2026-04-01T02:24:38.386432+0000","last_peered":"2026-04-01T02:24:38.386432+0000","last_clean":"2026-04-01T02:24:38.386432+0000","last_became_active":"2026-04-01T02:24:35.981016+0000","last_became_peered":"2026-04-01T02:24:35.981016+0000","last_unstale":"2026-04-01T02:24:38.386432+0000","last_undegraded":"2026-04-01T02:24:38.386432+0000","last_fullsized":"2026-04-01T02:24:38.386432+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00033543800000000002,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[4,2],"acting":[4,2],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":4,"acting_primary":4,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.4","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.323971+0000","last_change":"2026-04-01T02:24:37.323971+0000","last_active":"2026-04-01T02:24:37.323971+0000","last_peered":"2026-04-01T02:24:37.323971+0000","last_clean":"2026-04-01T02:24:37.323971+0000","last_became_active":"2026-04-01T02:24:35.317554+0000","last_became_peered":"2026-04-01T02:24:35.317554+0000","last_unstale":"2026-04-01T02:24:37.323971+0000","last_undegraded":"2026-04-01T02:24:37.323971+0000","last_fullsized":"2026-04-01T02:24:37.323971+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00022975099999999999,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[3,5],"acting":[3,5],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":3,"acting_primary":3,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"2.0","version":"16'32","reported_seq":57,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.323037+0000","last_change":"2026-04-01T02:24:36.319226+0000","last_active":"2026-04-01T02:24:37.323037+0000","last_peered":"2026-04-01T02:24:37.323037+0000","last_clean":"2026-04-01T02:24:37.323037+0000","last_became_active":"2026-04-01T02:24:36.319058+0000","last_became_peered":"2026-04-01T02:24:36.319058+0000","last_unstale":"2026-04-01T02:24:37.323037+0000","last_undegraded":"2026-04-01T02:24:37.323037+0000","last_fullsized":"2026-04-01T02:24:37.323037+0000","mapping_epoch":15,"log_start":"0'0","ondisk_log_start":"0'0","created":15,"last_epoch_clean":16,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:35.304085+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:35.304085+0000","last_clean_scrub_stamp":"2026-04-01T02:24:35.304085+0000","objects_scrubbed":0,"log_size":32,"log_dups_size":0,"ondisk_log_size":32,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T03:30:25.550091+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":4,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[7,1],"acting":[7,1],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":7,"acting_primary":7,"purged_snaps":[]},{"pgid":"1.3","version":"15'1","reported_seq":21,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.386701+0000","last_change":"2026-04-01T02:24:38.386701+0000","last_active":"2026-04-01T02:24:38.386701+0000","last_peered":"2026-04-01T02:24:38.386701+0000","last_clean":"2026-04-01T02:24:38.386701+0000","last_became_active":"2026-04-01T02:24:35.317447+0000","last_became_peered":"2026-04-01T02:24:35.317447+0000","last_unstale":"2026-04-01T02:24:38.386701+0000","last_undegraded":"2026-04-01T02:24:38.386701+0000","last_fullsized":"2026-04-01T02:24:38.386701+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":1,"log_dups_size":0,"ondisk_log_size":1,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00025872500000000001,"stat_sum":{"num_bytes":0,"num_objects":1,"num_object_clones":0,"num_object_copies":2,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":1,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[1,6],"acting":[1,6],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":1,"acting_primary":1,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.2","version":"17'2","reported_seq":22,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.327213+0000","last_change":"2026-04-01T02:24:37.323273+0000","last_active":"2026-04-01T02:24:37.327213+0000","last_peered":"2026-04-01T02:24:37.327213+0000","last_clean":"2026-04-01T02:24:37.327213+0000","last_became_active":"2026-04-01T02:24:36.006785+0000","last_became_peered":"2026-04-01T02:24:36.006785+0000","last_unstale":"2026-04-01T02:24:37.327213+0000","last_undegraded":"2026-04-01T02:24:37.327213+0000","last_fullsized":"2026-04-01T02:24:37.327213+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":2,"log_dups_size":0,"ondisk_log_size":2,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T06:41:20.848096+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00029460000000000001,"stat_sum":{"num_bytes":19,"num_objects":1,"num_object_clones":0,"num_object_copies":2,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":1,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":2,"num_write_kb":2,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[7,0],"acting":[7,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":7,"acting_primary":7,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.1","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.228269+0000","last_change":"2026-04-01T02:24:38.228269+0000","last_active":"2026-04-01T02:24:38.228269+0000","last_peered":"2026-04-01T02:24:38.228269+0000","last_clean":"2026-04-01T02:24:38.228269+0000","last_became_active":"2026-04-01T02:24:36.004627+0000","last_became_peered":"2026-04-01T02:24:36.004627+0000","last_unstale":"2026-04-01T02:24:38.228269+0000","last_undegraded":"2026-04-01T02:24:38.228269+0000","last_fullsized":"2026-04-01T02:24:38.228269+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00042962399999999998,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[5,0],"acting":[5,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":5,"acting_primary":5,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.0","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.323276+0000","last_change":"2026-04-01T02:24:37.323276+0000","last_active":"2026-04-01T02:24:37.323276+0000","last_peered":"2026-04-01T02:24:37.323276+0000","last_clean":"2026-04-01T02:24:37.323276+0000","last_became_active":"2026-04-01T02:24:36.006465+0000","last_became_peered":"2026-04-01T02:24:36.006465+0000","last_unstale":"2026-04-01T02:24:37.323276+0000","last_undegraded":"2026-04-01T02:24:37.323276+0000","last_fullsized":"2026-04-01T02:24:37.323276+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T14:23:00.703979+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00028514300000000002,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[7,0],"acting":[7,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":7,"acting_primary":7,"purged_snaps":[{"start":"2","length":"1"}]}],"pool_stats":[{"poolid":2,"num_pg":1,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":4,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":925696,"data_stored":918560,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":32,"ondisk_log_size":32,"up":2,"acting":2,"num_store_stats":2},{"poolid":1,"num_pg":8,"stat_sum":{"num_bytes":19,"num_objects":2,"num_object_clones":0,"num_object_copies":4,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":2,"num_write_kb":2,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":8192,"data_stored":38,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":3,"ondisk
_log_size":3,"up":16,"acting":16,"num_store_stats":8}],"osd_stats":[{"osd":7,"up_from":13,"seq":55834574851,"num_pgs":3,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27596,"kb_used_data":620,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344244,"statfs":{"total":96636764160,"available":96608505856,"internally_reserved":0,"allocated":634880,"data_stored":512258,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6822,"internal_metadata":27452762},"hb_peers":[0,1,2,3,4,5,6],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":6,"up_from":13,"seq":55834574853,"num_pgs":3,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27012,"kb_used_data":164,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344828,"statfs":{"total":96636764160,"available":96609103872,"internally_reserved":0,"allocated":167936,"data_stored":52959,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6826,"internal_metadata":27452758},"hb_peers":[0,1,2,3,4,5,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":5,"up_from":13,"seq":55834574851,"num_pgs":2,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":436620,"kb_used_data":152,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":93935220,"statfs":{"total":96636764160,"available":96189665280,"internally_reserved":0,"allocated":155648,"data_stored":46770,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6170,"internal_metadata":27453414},"hb_peers":[0,1,2,3,4,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":4,"up_from":13,"seq":55834574853,"num_pgs":1,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27020,"kb_used_data":152,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344820,"statfs":{"total":96636764160,"available":96609095680,"internally_reserved":0,"allocated":155648,"data_stored":46770,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6170,"internal_metadata":27453414},"hb_peers":[0,1,2,3,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":3,"up_from":13,"seq":55834574853,"num_pgs":2,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27012,"kb_used_data":164,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344828,"statfs":{"total":96636764160,"available":96609103872,"internally_reserved":0,"allocated":167936,"data_stored":52959,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6825,"internal_metadata":27452759},"hb_peers":[0,1,2,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_rep
aired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":2,"up_from":13,"seq":55834574851,"num_pgs":0,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":436620,"kb_used_data":152,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":93935220,"statfs":{"total":96636764160,"available":96189665280,"internally_reserved":0,"allocated":155648,"data_stored":46770,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6170,"internal_metadata":27453414},"hb_peers":[0,1,3,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":1,"up_from":13,"seq":55834574852,"num_pgs":2,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27592,"kb_used_data":616,"kb_used_omap":5,"kb_used_meta":26810,"kb_avail":94344248,"statfs":{"total":96636764160,"available":96608509952,"internally_reserved":0,"allocated":630784,"data_stored":512239,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":5526,"internal_metadata":27454058},"hb_peers":[0,2,3,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":0,"up_from":13,"seq":55834574853,"num_pgs":4,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27016,"kb_used_data":168,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344824,"statfs":{"total":96636764160,"available":96609099776,"internally_reserved":0,"allocated":172032,"data_stored":52978,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6827,"internal_metadata":27452757},"hb_peers":[1,2,3,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]}],"pool_statfs":[{"poolid":1,"osd":0,"total":0,"available":0,"internally_reserved":0,"allocated":4096,"data_stored":19,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":1,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":2,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":3,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":4,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":5,"total":0,"available":0,"internally_reserved":0
,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":6,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":7,"total":0,"available":0,"internally_reserved":0,"allocated":4096,"data_stored":19,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":2,"osd":1,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":2,"osd":7,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0}]}} 2026-04-01T02:24:40.534 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json 2026-04-01T02:24:40.711 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:40.712 INFO:teuthology.orchestra.run.vm03.stderr:dumped all 2026-04-01T02:24:40.723 INFO:teuthology.orchestra.run.vm03.stdout:{"pg_ready":true,"pg_map":{"version":15,"stamp":"2026-04-01T02:24:39.245429+0000","last_osdmap_epoch":0,"last_pg_scan":0,"pg_stats_sum":{"stat_sum":{"num_bytes":459299,"num_objects":4,"num_object_clones":0,"num_object_copies":8,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":4,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":59,"num_write_kb":586,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":35,"ondisk_log_size":35,"up":18,"acting":18,"num_store_stats":0},"osd_stats_sum":{"up_from":0,"seq":0,"num_pgs":17,"num_osds":8,"num_per_pool_osds":8,"num_per_pool_omap_osds":8,"kb":754974720,"kb_used":1036488,"kb_used_data":2188,"kb_used_omap":50,"kb_used_meta":214477,"kb_avail":753938232,"statfs":{"total":773094113280,"available":772032749568,"internally_reserved":0,"allocated":2240512,"data_stored":1323703,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":51336,"internal_metadata":219625336},"hb_peers":[],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[],"network_ping_times":[]},"pg_stats_delta":{"sta
t_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":0,"ondisk_log_size":0,"up":0,"acting":0,"num_store_stats":0,"stamp_delta":"0.000000"},"pg_stats":[{"pgid":"1.7","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.317123+0000","last_change":"2026-04-01T02:24:38.317123+0000","last_active":"2026-04-01T02:24:38.317123+0000","last_peered":"2026-04-01T02:24:38.317123+0000","last_clean":"2026-04-01T02:24:38.317123+0000","last_became_active":"2026-04-01T02:24:35.317959+0000","last_became_peered":"2026-04-01T02:24:35.317959+0000","last_unstale":"2026-04-01T02:24:38.317123+0000","last_undegraded":"2026-04-01T02:24:38.317123+0000","last_fullsized":"2026-04-01T02:24:38.317123+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T03:30:24.543101+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00042498600000000003,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[6,3],"acting":[6,3],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":6,"acting_primary":6,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.6","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.317093+0000","last_change":"2026-04-01T02:24:38.317093+0000","last_active":"2026-04-01T02:24:38.317093+0000","last_peered":"2026-04-01T02:24:38.317093+0000","last_clean":"2026-04-01T02:24:38.317093+0000","last_became_active":"2026-04-01T02:24:36.005924+0000","last_became_peered":"2026-04-01T02:24:36.005924+0000","last_unstale":"2026-04-01T02:24:38.317093+0000","last_undegraded":"2026-04-01T02:24:38.317093+0000","last_fullsized":"2026-04-01T02:24:38.317093+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T06:41:20.848096+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00040274400000000002,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[6,0],"acting":[6,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":6,"acting_primary":6,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.5","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.386432+0000","last_change":"2026-04-01T02:24:38.386432+0000","last_active":"2026-04-01T02:24:38.386432+0000","last_peered":"2026-04-01T02:24:38.386432+0000","last_clean":"2026-04-01T02:24:38.386432+0000","last_became_active":"2026-04-01T02:24:35.981016+0000","last_became_peered":"2026-04-01T02:24:35.981016+0000","last_unstale":"2026-04-01T02:24:38.386432+0000","last_undegraded":"2026-04-01T02:24:38.386432+0000","last_fullsized":"2026-04-01T02:24:38.386432+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00033543800000000002,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[4,2],"acting":[4,2],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":4,"acting_primary":4,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.4","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.323971+0000","last_change":"2026-04-01T02:24:37.323971+0000","last_active":"2026-04-01T02:24:37.323971+0000","last_peered":"2026-04-01T02:24:37.323971+0000","last_clean":"2026-04-01T02:24:37.323971+0000","last_became_active":"2026-04-01T02:24:35.317554+0000","last_became_peered":"2026-04-01T02:24:35.317554+0000","last_unstale":"2026-04-01T02:24:37.323971+0000","last_undegraded":"2026-04-01T02:24:37.323971+0000","last_fullsized":"2026-04-01T02:24:37.323971+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00022975099999999999,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[3,5],"acting":[3,5],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":3,"acting_primary":3,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"2.0","version":"16'32","reported_seq":57,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.323037+0000","last_change":"2026-04-01T02:24:36.319226+0000","last_active":"2026-04-01T02:24:37.323037+0000","last_peered":"2026-04-01T02:24:37.323037+0000","last_clean":"2026-04-01T02:24:37.323037+0000","last_became_active":"2026-04-01T02:24:36.319058+0000","last_became_peered":"2026-04-01T02:24:36.319058+0000","last_unstale":"2026-04-01T02:24:37.323037+0000","last_undegraded":"2026-04-01T02:24:37.323037+0000","last_fullsized":"2026-04-01T02:24:37.323037+0000","mapping_epoch":15,"log_start":"0'0","ondisk_log_start":"0'0","created":15,"last_epoch_clean":16,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:35.304085+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:35.304085+0000","last_clean_scrub_stamp":"2026-04-01T02:24:35.304085+0000","objects_scrubbed":0,"log_size":32,"log_dups_size":0,"ondisk_log_size":32,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T03:30:25.550091+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":4,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[7,1],"acting":[7,1],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":7,"acting_primary":7,"purged_snaps":[]},{"pgid":"1.3","version":"15'1","reported_seq":21,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.386701+0000","last_change":"2026-04-01T02:24:38.386701+0000","last_active":"2026-04-01T02:24:38.386701+0000","last_peered":"2026-04-01T02:24:38.386701+0000","last_clean":"2026-04-01T02:24:38.386701+0000","last_became_active":"2026-04-01T02:24:35.317447+0000","last_became_peered":"2026-04-01T02:24:35.317447+0000","last_unstale":"2026-04-01T02:24:38.386701+0000","last_undegraded":"2026-04-01T02:24:38.386701+0000","last_fullsized":"2026-04-01T02:24:38.386701+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":1,"log_dups_size":0,"ondisk_log_size":1,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00025872500000000001,"stat_sum":{"num_bytes":0,"num_objects":1,"num_object_clones":0,"num_object_copies":2,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":1,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[1,6],"acting":[1,6],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":1,"acting_primary":1,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.2","version":"17'2","reported_seq":22,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.327213+0000","last_change":"2026-04-01T02:24:37.323273+0000","last_active":"2026-04-01T02:24:37.327213+0000","last_peered":"2026-04-01T02:24:37.327213+0000","last_clean":"2026-04-01T02:24:37.327213+0000","last_became_active":"2026-04-01T02:24:36.006785+0000","last_became_peered":"2026-04-01T02:24:36.006785+0000","last_unstale":"2026-04-01T02:24:37.327213+0000","last_undegraded":"2026-04-01T02:24:37.327213+0000","last_fullsized":"2026-04-01T02:24:37.327213+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":2,"log_dups_size":0,"ondisk_log_size":2,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T06:41:20.848096+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00029460000000000001,"stat_sum":{"num_bytes":19,"num_objects":1,"num_object_clones":0,"num_object_copies":2,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":1,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":2,"num_write_kb":2,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[7,0],"acting":[7,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":7,"acting_primary":7,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.1","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:38.228269+0000","last_change":"2026-04-01T02:24:38.228269+0000","last_active":"2026-04-01T02:24:38.228269+0000","last_peered":"2026-04-01T02:24:38.228269+0000","last_clean":"2026-04-01T02:24:38.228269+0000","last_became_active":"2026-04-01T02:24:36.004627+0000","last_became_peered":"2026-04-01T02:24:36.004627+0000","last_unstale":"2026-04-01T02:24:38.228269+0000","last_undegraded":"2026-04-01T02:24:38.228269+0000","last_fullsized":"2026-04-01T02:24:38.228269+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T09:28:44.376987+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00042962399999999998,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[5,0],"acting":[5,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":5,"acting_primary":5,"purged_snaps":[{"start":"2","length":"1"}]},{"pgid":"1.0","version":"0'0","reported_seq":20,"reported_epoch":17,"state":"active+clean","last_fresh":"2026-04-01T02:24:37.323276+0000","last_change":"2026-04-01T02:24:37.323276+0000","last_active":"2026-04-01T02:24:37.323276+0000","last_peered":"2026-04-01T02:24:37.323276+0000","last_clean":"2026-04-01T02:24:37.323276+0000","last_became_active":"2026-04-01T02:24:36.006465+0000","last_became_peered":"2026-04-01T02:24:36.006465+0000","last_unstale":"2026-04-01T02:24:37.323276+0000","last_undegraded":"2026-04-01T02:24:37.323276+0000","last_fullsized":"2026-04-01T02:24:37.323276+0000","mapping_epoch":14,"log_start":"0'0","ondisk_log_start":"0'0","created":14,"last_epoch_clean":15,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-04-01T02:24:34.297094+0000","last_clean_scrub_stamp":"2026-04-01T02:24:34.297094+0000","objects_scrubbed":0,"log_size":0,"log_dups_size":0,"ondisk_log_size":0,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 
2026-04-02T14:23:00.703979+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0.00028514300000000002,"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[7,0],"acting":[7,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":7,"acting_primary":7,"purged_snaps":[{"start":"2","length":"1"}]}],"pool_stats":[{"poolid":2,"num_pg":1,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":4,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":925696,"data_stored":918560,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":32,"ondisk_log_size":32,"up":2,"acting":2,"num_store_stats":2},{"poolid":1,"num_pg":8,"stat_sum":{"num_bytes":19,"num_objects":2,"num_object_clones":0,"num_object_copies":4,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":2,"num_write_kb":2,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":8192,"data_stored":38,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":3,"ondisk
_log_size":3,"up":16,"acting":16,"num_store_stats":8}],"osd_stats":[{"osd":7,"up_from":13,"seq":55834574851,"num_pgs":3,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27596,"kb_used_data":620,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344244,"statfs":{"total":96636764160,"available":96608505856,"internally_reserved":0,"allocated":634880,"data_stored":512258,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6822,"internal_metadata":27452762},"hb_peers":[0,1,2,3,4,5,6],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":6,"up_from":13,"seq":55834574853,"num_pgs":3,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27012,"kb_used_data":164,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344828,"statfs":{"total":96636764160,"available":96609103872,"internally_reserved":0,"allocated":167936,"data_stored":52959,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6826,"internal_metadata":27452758},"hb_peers":[0,1,2,3,4,5,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":5,"up_from":13,"seq":55834574851,"num_pgs":2,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":436620,"kb_used_data":152,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":93935220,"statfs":{"total":96636764160,"available":96189665280,"internally_reserved":0,"allocated":155648,"data_stored":46770,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6170,"internal_metadata":27453414},"hb_peers":[0,1,2,3,4,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":4,"up_from":13,"seq":55834574853,"num_pgs":1,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27020,"kb_used_data":152,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344820,"statfs":{"total":96636764160,"available":96609095680,"internally_reserved":0,"allocated":155648,"data_stored":46770,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6170,"internal_metadata":27453414},"hb_peers":[0,1,2,3,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":3,"up_from":13,"seq":55834574853,"num_pgs":2,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27012,"kb_used_data":164,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344828,"statfs":{"total":96636764160,"available":96609103872,"internally_reserved":0,"allocated":167936,"data_stored":52959,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6825,"internal_metadata":27452759},"hb_peers":[0,1,2,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_rep
aired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":2,"up_from":13,"seq":55834574851,"num_pgs":0,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":436620,"kb_used_data":152,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":93935220,"statfs":{"total":96636764160,"available":96189665280,"internally_reserved":0,"allocated":155648,"data_stored":46770,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6170,"internal_metadata":27453414},"hb_peers":[0,1,3,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":1,"up_from":13,"seq":55834574852,"num_pgs":2,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27592,"kb_used_data":616,"kb_used_omap":5,"kb_used_meta":26810,"kb_avail":94344248,"statfs":{"total":96636764160,"available":96608509952,"internally_reserved":0,"allocated":630784,"data_stored":512239,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":5526,"internal_metadata":27454058},"hb_peers":[0,2,3,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":0,"up_from":13,"seq":55834574853,"num_pgs":4,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":94371840,"kb_used":27016,"kb_used_data":168,"kb_used_omap":6,"kb_used_meta":26809,"kb_avail":94344824,"statfs":{"total":96636764160,"available":96609099776,"internally_reserved":0,"allocated":172032,"data_stored":52978,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":6827,"internal_metadata":27452757},"hb_peers":[1,2,3,4,5,6,7],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]}],"pool_statfs":[{"poolid":1,"osd":0,"total":0,"available":0,"internally_reserved":0,"allocated":4096,"data_stored":19,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":1,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":2,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":3,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":4,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":5,"total":0,"available":0,"internally_reserved":0
,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":6,"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":7,"total":0,"available":0,"internally_reserved":0,"allocated":4096,"data_stored":19,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":2,"osd":1,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":2,"osd":7,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0}]}} 2026-04-01T02:24:40.723 INFO:tasks.ceph.ceph_manager.ceph:clean! 2026-04-01T02:24:40.723 INFO:tasks.ceph:Waiting until ceph cluster ceph is healthy... 2026-04-01T02:24:40.723 INFO:tasks.ceph.ceph_manager.ceph:wait_until_healthy 2026-04-01T02:24:40.723 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph health --format=json 2026-04-01T02:24:40.932 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:24:40.932 INFO:teuthology.orchestra.run.vm03.stdout:{"status":"HEALTH_OK","checks":{},"mutes":[]} 2026-04-01T02:24:40.942 INFO:tasks.ceph.ceph_manager.ceph:wait_until_healthy done 2026-04-01T02:24:40.942 INFO:teuthology.run_tasks:Running task openssl_keys... 2026-04-01T02:24:40.945 INFO:teuthology.run_tasks:Running task rgw... 
2026-04-01T02:24:40.949 DEBUG:tasks.rgw:config is {'client.0': None, 'client.1': None, 'client.2': None} 2026-04-01T02:24:40.949 DEBUG:tasks.rgw:client list is dict_keys(['client.0', 'client.1', 'client.2']) 2026-04-01T02:24:40.949 INFO:tasks.rgw:Creating data pools 2026-04-01T02:24:40.949 DEBUG:tasks.rgw:Obtaining remote for client client.0 2026-04-01T02:24:40.949 DEBUG:teuthology.orchestra.run.vm03:> sudo ceph osd pool create default.rgw.buckets.data 64 64 --cluster ceph 2026-04-01T02:24:41.388 INFO:teuthology.orchestra.run.vm03.stderr:pool 'default.rgw.buckets.data' created 2026-04-01T02:24:41.409 DEBUG:teuthology.orchestra.run.vm03:> sudo ceph osd pool application enable default.rgw.buckets.data rgw --cluster ceph 2026-04-01T02:24:42.391 INFO:teuthology.orchestra.run.vm03.stderr:enabled application 'rgw' on pool 'default.rgw.buckets.data' 2026-04-01T02:24:42.425 DEBUG:teuthology.orchestra.run.vm03:> sudo ceph osd pool create default.rgw.buckets.index 64 64 --cluster ceph 2026-04-01T02:24:42.630 INFO:teuthology.orchestra.run.vm03.stderr:pool 'default.rgw.buckets.index' created 2026-04-01T02:24:42.652 DEBUG:teuthology.orchestra.run.vm03:> sudo ceph osd pool application enable default.rgw.buckets.index rgw --cluster ceph 2026-04-01T02:24:43.636 INFO:teuthology.orchestra.run.vm03.stderr:enabled application 'rgw' on pool 'default.rgw.buckets.index' 2026-04-01T02:24:43.666 DEBUG:tasks.rgw:Obtaining remote for client client.1 2026-04-01T02:24:43.666 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph osd pool create default.rgw.buckets.data 64 64 --cluster ceph 2026-04-01T02:24:43.858 INFO:teuthology.orchestra.run.vm06.stderr:pool 'default.rgw.buckets.data' already exists 2026-04-01T02:24:43.869 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph osd pool application enable default.rgw.buckets.data rgw --cluster ceph 2026-04-01T02:24:44.642 INFO:teuthology.orchestra.run.vm06.stderr:enabled application 'rgw' on pool 'default.rgw.buckets.data' 2026-04-01T02:24:44.654 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph osd pool create default.rgw.buckets.index 64 64 --cluster ceph 2026-04-01T02:24:44.845 INFO:teuthology.orchestra.run.vm06.stderr:pool 'default.rgw.buckets.index' already exists 2026-04-01T02:24:44.856 DEBUG:teuthology.orchestra.run.vm06:> sudo ceph osd pool application enable default.rgw.buckets.index rgw --cluster ceph 2026-04-01T02:24:45.656 INFO:teuthology.orchestra.run.vm06.stderr:enabled application 'rgw' on pool 'default.rgw.buckets.index' 2026-04-01T02:24:45.668 DEBUG:tasks.rgw:Obtaining remote for client client.2 2026-04-01T02:24:45.668 DEBUG:teuthology.orchestra.run.vm08:> sudo ceph osd pool create default.rgw.buckets.data 64 64 --cluster ceph 2026-04-01T02:24:45.865 INFO:teuthology.orchestra.run.vm08.stderr:pool 'default.rgw.buckets.data' already exists 2026-04-01T02:24:45.875 DEBUG:teuthology.orchestra.run.vm08:> sudo ceph osd pool application enable default.rgw.buckets.data rgw --cluster ceph 2026-04-01T02:24:46.659 INFO:teuthology.orchestra.run.vm08.stderr:enabled application 'rgw' on pool 'default.rgw.buckets.data' 2026-04-01T02:24:46.672 DEBUG:teuthology.orchestra.run.vm08:> sudo ceph osd pool create default.rgw.buckets.index 64 64 --cluster ceph 2026-04-01T02:24:46.867 INFO:teuthology.orchestra.run.vm08.stderr:pool 'default.rgw.buckets.index' already exists 2026-04-01T02:24:46.879 DEBUG:teuthology.orchestra.run.vm08:> sudo ceph osd pool application enable default.rgw.buckets.index rgw --cluster ceph 2026-04-01T02:24:47.667 INFO:teuthology.orchestra.run.vm08.stderr:enabled 
application 'rgw' on pool 'default.rgw.buckets.index' 2026-04-01T02:24:47.680 DEBUG:tasks.rgw:Pools created 2026-04-01T02:24:47.680 INFO:tasks.util.rgw:rgwadmin: client.0 : ['user', 'list'] 2026-04-01T02:24:47.680 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.0', '--cluster', 'ceph', 'user', 'list'] 2026-04-01T02:24:47.680 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph user list 2026-04-01T02:24:47.715 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:47.715 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:49.733 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.731+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.731+0000 7f8d7e125900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.731+0000 7f8d7e125900 20 realm 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.731+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.732+0000 7f8d7e125900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.732+0000 7f8d7e125900 4 RGWPeriod::init failed to init realm id : (2) No such file or directory 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.732+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.732+0000 7f8d7e125900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:49.734 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.732+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.735 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.732+0000 7f8d7e125900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:24:49.735 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.732+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.736 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.733+0000 7f8d7e125900 20 rados_obj.operate() r=0 bl.length=1060 2026-04-01T02:24:49.736 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.733+0000 7f8d7e125900 20 searching for the correct realm 2026-04-01T02:24:49.743 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.740+0000 7f8d7e125900 20 RGWRados::pool_iterate: got zone_info.9872c6d3-13db-4797-98fb-df48c9391eb9 2026-04-01T02:24:49.743 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.740+0000 7f8d7e125900 20 RGWRados::pool_iterate: got default.zonegroup. 2026-04-01T02:24:49.743 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.740+0000 7f8d7e125900 20 RGWRados::pool_iterate: got zonegroup_info.13fe02e2-6f52-4a18-a50f-ad9150d4f62b 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.740+0000 7f8d7e125900 20 RGWRados::pool_iterate: got default.zone. 
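
The radosgw-admin user list invocation above writes its JSON result to stdout while the level-20 debug lines (because of --log-to-stderr) go to stderr, which is why they are captured separately in the log. A minimal sketch of issuing the same query and parsing the result, assuming radosgw-admin and the client.0 keyring are present on the node; this is an illustration only, not part of the captured run.

    import json
    import subprocess

    proc = subprocess.run(
        ["radosgw-admin", "--format", "json", "-n", "client.0",
         "--cluster", "ceph", "user", "list"],
        check=True, capture_output=True, text=True,
    )
    # stdout carries the JSON payload (a list of RGW user IDs, typically empty on a fresh deployment);
    # stderr carries the debug/log output seen above.
    users = json.loads(proc.stdout)
    print(users)
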
2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.740+0000 7f8d7e125900 20 RGWRados::pool_iterate: got zone_names.default 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.740+0000 7f8d7e125900 20 RGWRados::pool_iterate: got zonegroups_names.default 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.740+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados_obj.operate() r=0 bl.length=436 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 zone default found 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 4 Realm: () 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 4 ZoneGroup: default (13fe02e2-6f52-4a18-a50f-ad9150d4f62b) 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 4 Zone: default (9872c6d3-13db-4797-98fb-df48c9391eb9) 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 10 cannot find current period zonegroup using local zonegroup configuration 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 zonegroup default 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:49.744 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:49.741+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:51.720 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:51.717+0000 7f8d7e125900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:51.720 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:51.717+0000 7f8d7e125900 20 rados->read ofs=0 len=0 2026-04-01T02:24:51.721 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:51.718+0000 7f8d7e125900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:51.721 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:51.718+0000 7f8d7e125900 20 started sync module instance, tier type = 2026-04-01T02:24:51.721 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:51.718+0000 7f8d7e125900 20 started zone id=9872c6d3-13db-4797-98fb-df48c9391eb9 (name=default) with tier type = 2026-04-01T02:24:53.746 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.743+0000 7f8d7e125900 20 add_watcher() i=0 2026-04-01T02:24:53.747 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.744+0000 7f8d7e125900 20 add_watcher() i=3 2026-04-01T02:24:53.749 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.746+0000 7f8d7e125900 20 add_watcher() i=1 2026-04-01T02:24:53.749 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.746+0000 7f8d7e125900 20 add_watcher() i=7 2026-04-01T02:24:53.749 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.746+0000 7f8d7e125900 20 add_watcher() i=5 2026-04-01T02:24:53.749 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.747+0000 7f8d7e125900 20 add_watcher() i=2 2026-04-01T02:24:53.749 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.747+0000 7f8d7e125900 20 add_watcher() i=4 2026-04-01T02:24:53.750 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.747+0000 7f8d7e125900 20 add_watcher() i=6 2026-04-01T02:24:53.750 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.747+0000 7f8d7e125900 2 all 8 watchers are set, enabling cache 2026-04-01T02:24:53.751 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.748+0000 7f8d72ffd640 5 boost::asio::awaitable, obj_version> > logback_generations::read(const DoutPrefixProvider*):446: oid=data_loggenerations_metadata not found 2026-04-01T02:24:53.751 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.748+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.0 2026-04-01T02:24:53.751 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.748+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.752 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.749+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.0 does not exist 2026-04-01T02:24:53.752 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.749+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.1 2026-04-01T02:24:53.752 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.749+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.753 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.750+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.1 does not exist 2026-04-01T02:24:53.753 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.750+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.2 2026-04-01T02:24:53.753 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.750+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.753 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.751+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.2 does not exist 2026-04-01T02:24:53.753 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.751+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, 
bool&):59 probing obj=data_log.3 2026-04-01T02:24:53.753 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.751+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.754 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.751+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.3 does not exist 2026-04-01T02:24:53.754 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.751+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.4 2026-04-01T02:24:53.754 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.751+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.754 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.752+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.4 does not exist 2026-04-01T02:24:53.754 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.752+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.5 2026-04-01T02:24:53.754 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.752+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.755 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.752+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.5 does not exist 2026-04-01T02:24:53.755 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.752+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.6 2026-04-01T02:24:53.755 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.752+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.755 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.6 does not exist 2026-04-01T02:24:53.755 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.7 2026-04-01T02:24:53.755 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.7 does not exist 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const 
neorados::IOContext&, bool&):59 probing obj=data_log.8 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.8 does not exist 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.9 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.9 does not exist 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.10 2026-04-01T02:24:53.756 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.753+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.10 does not exist 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.11 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.11 does not exist 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.12 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.12 does not exist 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, 
const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.13 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.754+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.13 does not exist 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.14 2026-04-01T02:24:53.757 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.14 does not exist 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.15 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.15 does not exist 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.16 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.16 does not exist 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.17 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.17 does not exist 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const 
DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.18 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.755+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.18 does not exist 2026-04-01T02:24:53.758 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.19 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.19 does not exist 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.20 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.20 does not exist 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.21 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.756+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.21 does not exist 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.22 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.22 does not exist 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d7c044640 20 boost::asio::awaitable 
{anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.23 2026-04-01T02:24:53.759 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.23 does not exist 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.24 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.24 does not exist 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.25 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.25 does not exist 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.26 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.757+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.26 does not exist 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.27 2026-04-01T02:24:53.760 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.27 does not exist 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 
7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.28 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.28 does not exist 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.29 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.29 does not exist 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.30 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.758+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.30 does not exist 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.31 2026-04-01T02:24:53.761 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.762 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.31 does not exist 2026-04-01T02:24:53.762 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.32 2026-04-01T02:24:53.762 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.762 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.32 does not exist 2026-04-01T02:24:53.762 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.33 2026-04-01T02:24:53.762 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.762 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.33 does not exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.34 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.759+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.34 does not exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.35 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.35 does not exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.36 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.36 does not exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.37 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.37 does not 
exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.38 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.38 does not exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.39 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.760+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.39 does not exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.40 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.40 does not exist 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.41 2026-04-01T02:24:53.763 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.41 does not exist 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.42 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, 
bool&):78: obj=data_log.42 does not exist 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.43 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.43 does not exist 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.44 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.761+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.44 does not exist 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.45 2026-04-01T02:24:53.764 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.45 does not exist 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.46 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.46 does not exist 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.47 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const 
neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.47 does not exist 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.48 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.48 does not exist 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.49 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.762+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.49 does not exist 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.50 2026-04-01T02:24:53.765 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.50 does not exist 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.51 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.51 does not exist 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.52 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const 
DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.52 does not exist 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.53 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.53 does not exist 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.54 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.763+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.54 does not exist 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.55 2026-04-01T02:24:53.766 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.55 does not exist 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.56 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.56 does not exist 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.57 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d71ffb640 20 boost::asio::awaitable 
{anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.57 does not exist 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.58 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.58 does not exist 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.59 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.764+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.59 does not exist 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.60 2026-04-01T02:24:53.767 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.60 does not exist 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.61 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.61 does not exist 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.62 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 
7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.62 does not exist 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.63 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.765+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.63 does not exist 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.64 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.64 does not exist 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.65 2026-04-01T02:24:53.768 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.65 does not exist 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.66 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.66 does not exist 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.67 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.769 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.67 does not exist 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.68 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.766+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.68 does not exist 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.69 2026-04-01T02:24:53.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.69 does not exist 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.70 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.70 does not exist 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.71 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.71 does not exist 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.72 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d72ffd640 20 do_open: 
entering 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.72 does not exist 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.73 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.767+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.73 does not exist 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.74 2026-04-01T02:24:53.770 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.74 does not exist 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.75 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.75 does not exist 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.76 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.76 does not exist 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.77 2026-04-01T02:24:53.771 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.768+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.77 does not exist 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.78 2026-04-01T02:24:53.771 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.78 does not exist 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.79 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.79 does not exist 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.80 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.80 does not exist 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.81 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.81 does not exist 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing 
obj=data_log.82 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.769+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.82 does not exist 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.83 2026-04-01T02:24:53.772 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.83 does not exist 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.84 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.84 does not exist 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.85 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.85 does not exist 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.86 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.770+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.86 does not exist 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const 
neorados::IOContext&, bool&):59 probing obj=data_log.87 2026-04-01T02:24:53.773 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.87 does not exist 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.88 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.88 does not exist 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.89 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.89 does not exist 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.90 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.771+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.90 does not exist 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.91 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.91 does not exist 2026-04-01T02:24:53.774 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, 
const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.92 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.92 does not exist 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.93 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.93 does not exist 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.94 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.94 does not exist 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.95 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.772+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.95 does not exist 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.96 2026-04-01T02:24:53.775 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.96 does not exist 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const 
DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.97 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.97 does not exist 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.98 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.98 does not exist 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.99 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.99 does not exist 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.100 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.776 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.100 does not exist 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.101 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.101 does not exist 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d737fe640 20 
boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.102 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.102 does not exist 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.103 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.103 does not exist 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.104 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.774+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.104 does not exist 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.105 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.777 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.105 does not exist 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.106 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.106 does not exist 2026-04-01T02:24:53.778 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.107 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.107 does not exist 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.108 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.108 does not exist 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.109 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.775+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.109 does not exist 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.110 2026-04-01T02:24:53.778 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.110 does not exist 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.111 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: 
obj=data_log.111 does not exist 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.112 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.112 does not exist 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.113 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.113 does not exist 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.114 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.114 does not exist 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.115 2026-04-01T02:24:53.779 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.115 does not exist 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.116 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const 
neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.116 does not exist 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.117 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.117 does not exist 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.118 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.777+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.118 does not exist 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.119 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.119 does not exist 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.120 2026-04-01T02:24:53.780 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d72ffd640 20 do_open: entering 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.120 does not exist 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d727fc640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.121 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const 
DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.121 does not exist 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d71ffb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.122 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d71ffb640 20 do_open: entering 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.122 does not exist 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d7adbb640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.123 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d7adbb640 20 do_open: entering 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.778+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.123 does not exist 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d78db7640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.124 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d78db7640 20 do_open: entering 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.124 does not exist 2026-04-01T02:24:53.781 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d73fff640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.125 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d73fff640 20 do_open: entering 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.125 does not exist 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d737fe640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.126 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d737fe640 20 do_open: entering 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d7c044640 20 
boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.126 does not exist 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d7c044640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):59 probing obj=data_log.127 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.779+0000 7f8d7c044640 20 do_open: entering 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.780+0000 7f8d72ffd640 20 boost::asio::awaitable {anonymous}::probe_shard(const DoutPrefixProvider*, neorados::RADOS, const neorados::Object&, const neorados::IOContext&, bool&):78: obj=data_log.127 does not exist 2026-04-01T02:24:53.782 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.780+0000 7f8d72ffd640 20 do_create: entering 2026-04-01T02:24:53.784 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.781+0000 7f8d727fc640 20 do_open: entering 2026-04-01T02:24:53.787 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.784+0000 7f8d7e125900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:53.787 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:53.784+0000 7f8d7e125900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:56.936 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:56.932+0000 7f8d7e125900 10 rgw_init_ioctx warning: failed to set recovery_priority on default.rgw.meta 2026-04-01T02:24:56.936 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:56.933+0000 7f8d7e125900 5 note: GC not initialized 2026-04-01T02:24:56.936 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:56.933+0000 7f8d227f4640 20 reqs_thread_entry: start 2026-04-01T02:24:56.995 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:56.992+0000 7f8d7e125900 20 init_complete bucket index max shards: 11 2026-04-01T02:24:56.995 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:56.992+0000 7f8d7e125900 20 Filter name: none 2026-04-01T02:24:56.995 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:56.992+0000 7f8d1bfff640 20 reqs_thread_entry: start 2026-04-01T02:24:57.006 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.004+0000 7f8d7e125900 20 remove_watcher() i=7 2026-04-01T02:24:57.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.004+0000 7f8d7e125900 2 removed watcher, disabling cache 2026-04-01T02:24:57.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.004+0000 7f8d7e125900 20 remove_watcher() i=3 2026-04-01T02:24:57.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.004+0000 7f8d7e125900 20 remove_watcher() i=1 2026-04-01T02:24:57.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.004+0000 7f8d7e125900 20 remove_watcher() i=0 2026-04-01T02:24:57.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.005+0000 7f8d7e125900 20 remove_watcher() i=5 2026-04-01T02:24:57.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.005+0000 7f8d7e125900 20 remove_watcher() i=4 2026-04-01T02:24:57.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.005+0000 7f8d7e125900 20 remove_watcher() i=6 2026-04-01T02:24:57.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.005+0000 
7f8d7e125900 20 remove_watcher() i=2 2026-04-01T02:24:57.015 INFO:teuthology.orchestra.run.vm03.stdout:[] 2026-04-01T02:24:57.015 DEBUG:tasks.util.rgw: json result: [] 2026-04-01T02:24:57.015 INFO:tasks.rgw:Configuring storage class = FROZEN 2026-04-01T02:24:57.015 INFO:tasks.util.rgw:rgwadmin: client.0 : ['zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN'] 2026-04-01T02:24:57.015 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.0', '--cluster', 'ceph', 'zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN'] 2026-04-01T02:24:57.016 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph zonegroup placement add --rgw-zone default --placement-id default-placement --storage-class FROZEN 2026-04-01T02:24:57.100 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.100 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.115 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.112+0000 7f98d8552900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.115 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.112+0000 7f98d8552900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.115 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.112+0000 7f987f7fe640 20 reqs_thread_entry: start 2026-04-01T02:24:57.125 INFO:teuthology.orchestra.run.vm03.stdout:[{"key":"default-placement","val":{"name":"default-placement","tags":[],"storage_classes":["FROZEN","STANDARD"]}}] 2026-04-01T02:24:57.125 DEBUG:tasks.util.rgw: json result: [{'key': 'default-placement', 'val': {'name': 'default-placement', 'tags': [], 'storage_classes': ['FROZEN', 'STANDARD']}}] 2026-04-01T02:24:57.125 INFO:tasks.util.rgw:rgwadmin: client.0 : ['zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN', '--data-pool', 'default.rgw.buckets.data.frozen'] 2026-04-01T02:24:57.125 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.0', '--cluster', 'ceph', 'zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN', '--data-pool', 'default.rgw.buckets.data.frozen'] 2026-04-01T02:24:57.125 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph zone placement add --rgw-zone default --placement-id default-placement --storage-class FROZEN --data-pool default.rgw.buckets.data.frozen 2026-04-01T02:24:57.165 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.165 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.180 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.177+0000 7f3579552900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 
2026-04-01T02:24:57.180 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.177+0000 7f3579552900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.180 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.177+0000 7f3522fed640 20 reqs_thread_entry: start 2026-04-01T02:24:57.192 INFO:teuthology.orchestra.run.vm03.stdout:{"id":"9872c6d3-13db-4797-98fb-df48c9391eb9","name":"default","domain_root":"default.rgw.meta:root","control_pool":"default.rgw.control","dedup_pool":"default.rgw.dedup","gc_pool":"default.rgw.log:gc","lc_pool":"default.rgw.log:lc","log_pool":"default.rgw.log","intent_log_pool":"default.rgw.log:intent","usage_log_pool":"default.rgw.log:usage","roles_pool":"default.rgw.meta:roles","reshard_pool":"default.rgw.log:reshard","user_keys_pool":"default.rgw.meta:users.keys","user_email_pool":"default.rgw.meta:users.email","user_swift_pool":"default.rgw.meta:users.swift","user_uid_pool":"default.rgw.meta:users.uid","otp_pool":"default.rgw.otp","notif_pool":"default.rgw.log:notif","topics_pool":"default.rgw.meta:topics","account_pool":"default.rgw.meta:accounts","group_pool":"default.rgw.meta:groups","system_key":{"access_key":"","secret_key":""},"placement_pools":[{"key":"default-placement","val":{"index_pool":"default.rgw.buckets.index","storage_classes":{"FROZEN":{"data_pool":"default.rgw.buckets.data.frozen"},"STANDARD":{"data_pool":"default.rgw.buckets.data"}},"data_extra_pool":"default.rgw.buckets.non-ec","index_type":0,"inline_data":true}}],"realm_id":"","restore_pool":"default.rgw.log:restore"} 2026-04-01T02:24:57.192 DEBUG:tasks.util.rgw: json result: {'id': '9872c6d3-13db-4797-98fb-df48c9391eb9', 'name': 'default', 'domain_root': 'default.rgw.meta:root', 'control_pool': 'default.rgw.control', 'dedup_pool': 'default.rgw.dedup', 'gc_pool': 'default.rgw.log:gc', 'lc_pool': 'default.rgw.log:lc', 'log_pool': 'default.rgw.log', 'intent_log_pool': 'default.rgw.log:intent', 'usage_log_pool': 'default.rgw.log:usage', 'roles_pool': 'default.rgw.meta:roles', 'reshard_pool': 'default.rgw.log:reshard', 'user_keys_pool': 'default.rgw.meta:users.keys', 'user_email_pool': 'default.rgw.meta:users.email', 'user_swift_pool': 'default.rgw.meta:users.swift', 'user_uid_pool': 'default.rgw.meta:users.uid', 'otp_pool': 'default.rgw.otp', 'notif_pool': 'default.rgw.log:notif', 'topics_pool': 'default.rgw.meta:topics', 'account_pool': 'default.rgw.meta:accounts', 'group_pool': 'default.rgw.meta:groups', 'system_key': {'access_key': '', 'secret_key': ''}, 'placement_pools': [{'key': 'default-placement', 'val': {'index_pool': 'default.rgw.buckets.index', 'storage_classes': {'FROZEN': {'data_pool': 'default.rgw.buckets.data.frozen'}, 'STANDARD': {'data_pool': 'default.rgw.buckets.data'}}, 'data_extra_pool': 'default.rgw.buckets.non-ec', 'index_type': 0, 'inline_data': True}}], 'realm_id': '', 'restore_pool': 'default.rgw.log:restore'} 2026-04-01T02:24:57.192 INFO:tasks.rgw:Configuring storage class = LUKEWARM 2026-04-01T02:24:57.192 INFO:tasks.util.rgw:rgwadmin: client.0 : ['zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM'] 2026-04-01T02:24:57.192 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.0', '--cluster', 'ceph', 'zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM'] 
2026-04-01T02:24:57.192 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph zonegroup placement add --rgw-zone default --placement-id default-placement --storage-class LUKEWARM 2026-04-01T02:24:57.270 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.270 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.282 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.280+0000 7f8ec4f5f900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.282 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.280+0000 7f8ec4f5f900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.283 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.280+0000 7f8e6e7ec640 20 reqs_thread_entry: start 2026-04-01T02:24:57.291 INFO:teuthology.orchestra.run.vm03.stdout:[{"key":"default-placement","val":{"name":"default-placement","tags":[],"storage_classes":["FROZEN","LUKEWARM","STANDARD"]}}] 2026-04-01T02:24:57.291 DEBUG:tasks.util.rgw: json result: [{'key': 'default-placement', 'val': {'name': 'default-placement', 'tags': [], 'storage_classes': ['FROZEN', 'LUKEWARM', 'STANDARD']}}] 2026-04-01T02:24:57.291 INFO:tasks.util.rgw:rgwadmin: client.0 : ['zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM', '--data-pool', 'default.rgw.buckets.data.lukewarm'] 2026-04-01T02:24:57.291 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.0', '--cluster', 'ceph', 'zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM', '--data-pool', 'default.rgw.buckets.data.lukewarm'] 2026-04-01T02:24:57.291 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph zone placement add --rgw-zone default --placement-id default-placement --storage-class LUKEWARM --data-pool default.rgw.buckets.data.lukewarm 2026-04-01T02:24:57.373 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.373 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.384 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.381+0000 7f09dc763900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.384 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.381+0000 7f09dc763900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.384 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:24:57.382+0000 7f09857f2640 20 reqs_thread_entry: start 2026-04-01T02:24:57.393 
INFO:teuthology.orchestra.run.vm03.stdout:{"id":"9872c6d3-13db-4797-98fb-df48c9391eb9","name":"default","domain_root":"default.rgw.meta:root","control_pool":"default.rgw.control","dedup_pool":"default.rgw.dedup","gc_pool":"default.rgw.log:gc","lc_pool":"default.rgw.log:lc","log_pool":"default.rgw.log","intent_log_pool":"default.rgw.log:intent","usage_log_pool":"default.rgw.log:usage","roles_pool":"default.rgw.meta:roles","reshard_pool":"default.rgw.log:reshard","user_keys_pool":"default.rgw.meta:users.keys","user_email_pool":"default.rgw.meta:users.email","user_swift_pool":"default.rgw.meta:users.swift","user_uid_pool":"default.rgw.meta:users.uid","otp_pool":"default.rgw.otp","notif_pool":"default.rgw.log:notif","topics_pool":"default.rgw.meta:topics","account_pool":"default.rgw.meta:accounts","group_pool":"default.rgw.meta:groups","system_key":{"access_key":"","secret_key":""},"placement_pools":[{"key":"default-placement","val":{"index_pool":"default.rgw.buckets.index","storage_classes":{"FROZEN":{"data_pool":"default.rgw.buckets.data.frozen"},"LUKEWARM":{"data_pool":"default.rgw.buckets.data.lukewarm"},"STANDARD":{"data_pool":"default.rgw.buckets.data"}},"data_extra_pool":"default.rgw.buckets.non-ec","index_type":0,"inline_data":true}}],"realm_id":"","restore_pool":"default.rgw.log:restore"} 2026-04-01T02:24:57.393 DEBUG:tasks.util.rgw: json result: {'id': '9872c6d3-13db-4797-98fb-df48c9391eb9', 'name': 'default', 'domain_root': 'default.rgw.meta:root', 'control_pool': 'default.rgw.control', 'dedup_pool': 'default.rgw.dedup', 'gc_pool': 'default.rgw.log:gc', 'lc_pool': 'default.rgw.log:lc', 'log_pool': 'default.rgw.log', 'intent_log_pool': 'default.rgw.log:intent', 'usage_log_pool': 'default.rgw.log:usage', 'roles_pool': 'default.rgw.meta:roles', 'reshard_pool': 'default.rgw.log:reshard', 'user_keys_pool': 'default.rgw.meta:users.keys', 'user_email_pool': 'default.rgw.meta:users.email', 'user_swift_pool': 'default.rgw.meta:users.swift', 'user_uid_pool': 'default.rgw.meta:users.uid', 'otp_pool': 'default.rgw.otp', 'notif_pool': 'default.rgw.log:notif', 'topics_pool': 'default.rgw.meta:topics', 'account_pool': 'default.rgw.meta:accounts', 'group_pool': 'default.rgw.meta:groups', 'system_key': {'access_key': '', 'secret_key': ''}, 'placement_pools': [{'key': 'default-placement', 'val': {'index_pool': 'default.rgw.buckets.index', 'storage_classes': {'FROZEN': {'data_pool': 'default.rgw.buckets.data.frozen'}, 'LUKEWARM': {'data_pool': 'default.rgw.buckets.data.lukewarm'}, 'STANDARD': {'data_pool': 'default.rgw.buckets.data'}}, 'data_extra_pool': 'default.rgw.buckets.non-ec', 'index_type': 0, 'inline_data': True}}], 'realm_id': '', 'restore_pool': 'default.rgw.log:restore'} 2026-04-01T02:24:57.393 INFO:tasks.util.rgw:rgwadmin: client.1 : ['user', 'list'] 2026-04-01T02:24:57.393 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.1', '--cluster', 'ceph', 'user', 'list'] 2026-04-01T02:24:57.393 DEBUG:teuthology.orchestra.run.vm06:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.1 --cluster ceph user list 2026-04-01T02:24:57.429 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.429 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.447 
INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.445+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.446+0000 7f2035d32900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.446+0000 7f2035d32900 20 realm 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.446+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.447+0000 7f2035d32900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.447+0000 7f2035d32900 4 RGWPeriod::init failed to init realm id : (2) No such file or directory 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.447+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.447+0000 7f2035d32900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.449 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.447+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.450 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.448+0000 7f2035d32900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:24:57.450 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.448+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.451 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.448+0000 7f2035d32900 20 rados_obj.operate() r=0 bl.length=1190 2026-04-01T02:24:57.451 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.449+0000 7f2035d32900 20 searching for the correct realm 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.458+0000 7f2035d32900 20 RGWRados::pool_iterate: got zone_info.9872c6d3-13db-4797-98fb-df48c9391eb9 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.458+0000 7f2035d32900 20 RGWRados::pool_iterate: got default.zonegroup. 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.458+0000 7f2035d32900 20 RGWRados::pool_iterate: got zonegroup_info.13fe02e2-6f52-4a18-a50f-ad9150d4f62b 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.458+0000 7f2035d32900 20 RGWRados::pool_iterate: got default.zone. 
2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.458+0000 7f2035d32900 20 RGWRados::pool_iterate: got zone_names.default 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.458+0000 7f2035d32900 20 RGWRados::pool_iterate: got zonegroups_names.default 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.458+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.461 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 rados_obj.operate() r=0 bl.length=470 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 zone default found 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 4 Realm: () 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 4 ZoneGroup: default (13fe02e2-6f52-4a18-a50f-ad9150d4f62b) 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 4 Zone: default (9872c6d3-13db-4797-98fb-df48c9391eb9) 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 10 cannot find current period zonegroup using local zonegroup configuration 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 zonegroup default 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.459+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.460+0000 7f2035d32900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.462 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.460+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.463 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.460+0000 7f2035d32900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.463 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.460+0000 7f2035d32900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.463 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.460+0000 7f2035d32900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.463 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.461+0000 7f2035d32900 20 started sync module instance, tier type = 2026-04-01T02:24:57.463 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.461+0000 7f2035d32900 20 started zone id=9872c6d3-13db-4797-98fb-df48c9391eb9 (name=default) with tier type = 2026-04-01T02:24:57.467 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.464+0000 7f2035d32900 20 add_watcher() i=3 2026-04-01T02:24:57.467 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.464+0000 7f2035d32900 20 add_watcher() i=0 2026-04-01T02:24:57.467 
INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.465+0000 7f2035d32900 20 add_watcher() i=4 2026-04-01T02:24:57.467 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.465+0000 7f2035d32900 20 add_watcher() i=1 2026-04-01T02:24:57.468 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.465+0000 7f2035d32900 20 add_watcher() i=5 2026-04-01T02:24:57.468 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.465+0000 7f2035d32900 20 add_watcher() i=2 2026-04-01T02:24:57.468 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.465+0000 7f2035d32900 20 add_watcher() i=7 2026-04-01T02:24:57.468 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.465+0000 7f2035d32900 20 add_watcher() i=6 2026-04-01T02:24:57.468 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.465+0000 7f2035d32900 2 all 8 watchers are set, enabling cache 2026-04-01T02:24:57.471 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.468+0000 7f2035d32900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.471 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.468+0000 7f2035d32900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.471 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.468+0000 7f2035d32900 5 note: GC not initialized 2026-04-01T02:24:57.471 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.468+0000 7f1fdefed640 20 reqs_thread_entry: start 2026-04-01T02:24:57.516 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.514+0000 7f2035d32900 20 init_complete bucket index max shards: 11 2026-04-01T02:24:57.516 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.514+0000 7f2035d32900 20 Filter name: none 2026-04-01T02:24:57.517 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.514+0000 7f1fdcfe9640 20 reqs_thread_entry: start 2026-04-01T02:24:57.528 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.525+0000 7f2035d32900 20 remove_watcher() i=4 2026-04-01T02:24:57.528 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.525+0000 7f2035d32900 2 removed watcher, disabling cache 2026-04-01T02:24:57.528 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.525+0000 7f2035d32900 20 remove_watcher() i=3 2026-04-01T02:24:57.528 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.526+0000 7f2035d32900 20 remove_watcher() i=1 2026-04-01T02:24:57.528 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.526+0000 7f2035d32900 20 remove_watcher() i=0 2026-04-01T02:24:57.528 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.526+0000 7f2035d32900 20 remove_watcher() i=5 2026-04-01T02:24:57.528 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.526+0000 7f2035d32900 20 remove_watcher() i=6 2026-04-01T02:24:57.529 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.526+0000 7f2035d32900 20 remove_watcher() i=2 2026-04-01T02:24:57.529 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.526+0000 7f2035d32900 20 remove_watcher() i=7 2026-04-01T02:24:57.535 INFO:teuthology.orchestra.run.vm06.stdout:[] 2026-04-01T02:24:57.535 DEBUG:tasks.util.rgw: json result: [] 2026-04-01T02:24:57.535 INFO:tasks.rgw:Configuring storage class = FROZEN 2026-04-01T02:24:57.535 INFO:tasks.util.rgw:rgwadmin: client.1 : ['zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN'] 2026-04-01T02:24:57.535 
DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.1', '--cluster', 'ceph', 'zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN'] 2026-04-01T02:24:57.535 DEBUG:teuthology.orchestra.run.vm06:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.1 --cluster ceph zonegroup placement add --rgw-zone default --placement-id default-placement --storage-class FROZEN 2026-04-01T02:24:57.613 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.613 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.625 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.622+0000 7f4c4b374900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.625 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.622+0000 7f4c4b374900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.625 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.622+0000 7f4bf57ea640 20 reqs_thread_entry: start 2026-04-01T02:24:57.632 INFO:teuthology.orchestra.run.vm06.stdout:[{"key":"default-placement","val":{"name":"default-placement","tags":[],"storage_classes":["FROZEN","LUKEWARM","STANDARD"]}}] 2026-04-01T02:24:57.632 DEBUG:tasks.util.rgw: json result: [{'key': 'default-placement', 'val': {'name': 'default-placement', 'tags': [], 'storage_classes': ['FROZEN', 'LUKEWARM', 'STANDARD']}}] 2026-04-01T02:24:57.632 INFO:tasks.util.rgw:rgwadmin: client.1 : ['zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN', '--data-pool', 'default.rgw.buckets.data.frozen'] 2026-04-01T02:24:57.633 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.1', '--cluster', 'ceph', 'zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN', '--data-pool', 'default.rgw.buckets.data.frozen'] 2026-04-01T02:24:57.633 DEBUG:teuthology.orchestra.run.vm06:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.1 --cluster ceph zone placement add --rgw-zone default --placement-id default-placement --storage-class FROZEN --data-pool default.rgw.buckets.data.frozen 2026-04-01T02:24:57.712 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.712 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.724 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.721+0000 7f8de2af2900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.724 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.721+0000 7f8de2af2900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.724 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.721+0000 7f8d8d7f2640 20 reqs_thread_entry: start 2026-04-01T02:24:57.733 
INFO:teuthology.orchestra.run.vm06.stdout:{"id":"9872c6d3-13db-4797-98fb-df48c9391eb9","name":"default","domain_root":"default.rgw.meta:root","control_pool":"default.rgw.control","dedup_pool":"default.rgw.dedup","gc_pool":"default.rgw.log:gc","lc_pool":"default.rgw.log:lc","log_pool":"default.rgw.log","intent_log_pool":"default.rgw.log:intent","usage_log_pool":"default.rgw.log:usage","roles_pool":"default.rgw.meta:roles","reshard_pool":"default.rgw.log:reshard","user_keys_pool":"default.rgw.meta:users.keys","user_email_pool":"default.rgw.meta:users.email","user_swift_pool":"default.rgw.meta:users.swift","user_uid_pool":"default.rgw.meta:users.uid","otp_pool":"default.rgw.otp","notif_pool":"default.rgw.log:notif","topics_pool":"default.rgw.meta:topics","account_pool":"default.rgw.meta:accounts","group_pool":"default.rgw.meta:groups","system_key":{"access_key":"","secret_key":""},"placement_pools":[{"key":"default-placement","val":{"index_pool":"default.rgw.buckets.index","storage_classes":{"FROZEN":{"data_pool":"default.rgw.buckets.data.frozen"},"LUKEWARM":{"data_pool":"default.rgw.buckets.data.lukewarm"},"STANDARD":{"data_pool":"default.rgw.buckets.data"}},"data_extra_pool":"default.rgw.buckets.non-ec","index_type":0,"inline_data":true}}],"realm_id":"","restore_pool":"default.rgw.log:restore"} 2026-04-01T02:24:57.733 DEBUG:tasks.util.rgw: json result: {'id': '9872c6d3-13db-4797-98fb-df48c9391eb9', 'name': 'default', 'domain_root': 'default.rgw.meta:root', 'control_pool': 'default.rgw.control', 'dedup_pool': 'default.rgw.dedup', 'gc_pool': 'default.rgw.log:gc', 'lc_pool': 'default.rgw.log:lc', 'log_pool': 'default.rgw.log', 'intent_log_pool': 'default.rgw.log:intent', 'usage_log_pool': 'default.rgw.log:usage', 'roles_pool': 'default.rgw.meta:roles', 'reshard_pool': 'default.rgw.log:reshard', 'user_keys_pool': 'default.rgw.meta:users.keys', 'user_email_pool': 'default.rgw.meta:users.email', 'user_swift_pool': 'default.rgw.meta:users.swift', 'user_uid_pool': 'default.rgw.meta:users.uid', 'otp_pool': 'default.rgw.otp', 'notif_pool': 'default.rgw.log:notif', 'topics_pool': 'default.rgw.meta:topics', 'account_pool': 'default.rgw.meta:accounts', 'group_pool': 'default.rgw.meta:groups', 'system_key': {'access_key': '', 'secret_key': ''}, 'placement_pools': [{'key': 'default-placement', 'val': {'index_pool': 'default.rgw.buckets.index', 'storage_classes': {'FROZEN': {'data_pool': 'default.rgw.buckets.data.frozen'}, 'LUKEWARM': {'data_pool': 'default.rgw.buckets.data.lukewarm'}, 'STANDARD': {'data_pool': 'default.rgw.buckets.data'}}, 'data_extra_pool': 'default.rgw.buckets.non-ec', 'index_type': 0, 'inline_data': True}}], 'realm_id': '', 'restore_pool': 'default.rgw.log:restore'} 2026-04-01T02:24:57.733 INFO:tasks.rgw:Configuring storage class = LUKEWARM 2026-04-01T02:24:57.733 INFO:tasks.util.rgw:rgwadmin: client.1 : ['zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM'] 2026-04-01T02:24:57.733 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.1', '--cluster', 'ceph', 'zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM'] 2026-04-01T02:24:57.734 DEBUG:teuthology.orchestra.run.vm06:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.1 --cluster ceph 
zonegroup placement add --rgw-zone default --placement-id default-placement --storage-class LUKEWARM 2026-04-01T02:24:57.810 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.810 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.822 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.820+0000 7f5132974900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.822 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.820+0000 7f5132974900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.823 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.820+0000 7f50d77fe640 20 reqs_thread_entry: start 2026-04-01T02:24:57.831 INFO:teuthology.orchestra.run.vm06.stdout:[{"key":"default-placement","val":{"name":"default-placement","tags":[],"storage_classes":["FROZEN","LUKEWARM","STANDARD"]}}] 2026-04-01T02:24:57.831 DEBUG:tasks.util.rgw: json result: [{'key': 'default-placement', 'val': {'name': 'default-placement', 'tags': [], 'storage_classes': ['FROZEN', 'LUKEWARM', 'STANDARD']}}] 2026-04-01T02:24:57.831 INFO:tasks.util.rgw:rgwadmin: client.1 : ['zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM', '--data-pool', 'default.rgw.buckets.data.lukewarm'] 2026-04-01T02:24:57.831 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.1', '--cluster', 'ceph', 'zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM', '--data-pool', 'default.rgw.buckets.data.lukewarm'] 2026-04-01T02:24:57.831 DEBUG:teuthology.orchestra.run.vm06:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.1 --cluster ceph zone placement add --rgw-zone default --placement-id default-placement --storage-class LUKEWARM --data-pool default.rgw.buckets.data.lukewarm 2026-04-01T02:24:57.911 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.911 INFO:teuthology.orchestra.run.vm06.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.924 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.922+0000 7f64a6b23900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:57.924 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.922+0000 7f64a6b23900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:57.924 INFO:teuthology.orchestra.run.vm06.stderr:2026-04-01T02:24:57.922+0000 7f6450ff1640 20 reqs_thread_entry: start 2026-04-01T02:24:57.934 
INFO:teuthology.orchestra.run.vm06.stdout:{"id":"9872c6d3-13db-4797-98fb-df48c9391eb9","name":"default","domain_root":"default.rgw.meta:root","control_pool":"default.rgw.control","dedup_pool":"default.rgw.dedup","gc_pool":"default.rgw.log:gc","lc_pool":"default.rgw.log:lc","log_pool":"default.rgw.log","intent_log_pool":"default.rgw.log:intent","usage_log_pool":"default.rgw.log:usage","roles_pool":"default.rgw.meta:roles","reshard_pool":"default.rgw.log:reshard","user_keys_pool":"default.rgw.meta:users.keys","user_email_pool":"default.rgw.meta:users.email","user_swift_pool":"default.rgw.meta:users.swift","user_uid_pool":"default.rgw.meta:users.uid","otp_pool":"default.rgw.otp","notif_pool":"default.rgw.log:notif","topics_pool":"default.rgw.meta:topics","account_pool":"default.rgw.meta:accounts","group_pool":"default.rgw.meta:groups","system_key":{"access_key":"","secret_key":""},"placement_pools":[{"key":"default-placement","val":{"index_pool":"default.rgw.buckets.index","storage_classes":{"FROZEN":{"data_pool":"default.rgw.buckets.data.frozen"},"LUKEWARM":{"data_pool":"default.rgw.buckets.data.lukewarm"},"STANDARD":{"data_pool":"default.rgw.buckets.data"}},"data_extra_pool":"default.rgw.buckets.non-ec","index_type":0,"inline_data":true}}],"realm_id":"","restore_pool":"default.rgw.log:restore"} 2026-04-01T02:24:57.934 DEBUG:tasks.util.rgw: json result: {'id': '9872c6d3-13db-4797-98fb-df48c9391eb9', 'name': 'default', 'domain_root': 'default.rgw.meta:root', 'control_pool': 'default.rgw.control', 'dedup_pool': 'default.rgw.dedup', 'gc_pool': 'default.rgw.log:gc', 'lc_pool': 'default.rgw.log:lc', 'log_pool': 'default.rgw.log', 'intent_log_pool': 'default.rgw.log:intent', 'usage_log_pool': 'default.rgw.log:usage', 'roles_pool': 'default.rgw.meta:roles', 'reshard_pool': 'default.rgw.log:reshard', 'user_keys_pool': 'default.rgw.meta:users.keys', 'user_email_pool': 'default.rgw.meta:users.email', 'user_swift_pool': 'default.rgw.meta:users.swift', 'user_uid_pool': 'default.rgw.meta:users.uid', 'otp_pool': 'default.rgw.otp', 'notif_pool': 'default.rgw.log:notif', 'topics_pool': 'default.rgw.meta:topics', 'account_pool': 'default.rgw.meta:accounts', 'group_pool': 'default.rgw.meta:groups', 'system_key': {'access_key': '', 'secret_key': ''}, 'placement_pools': [{'key': 'default-placement', 'val': {'index_pool': 'default.rgw.buckets.index', 'storage_classes': {'FROZEN': {'data_pool': 'default.rgw.buckets.data.frozen'}, 'LUKEWARM': {'data_pool': 'default.rgw.buckets.data.lukewarm'}, 'STANDARD': {'data_pool': 'default.rgw.buckets.data'}}, 'data_extra_pool': 'default.rgw.buckets.non-ec', 'index_type': 0, 'inline_data': True}}], 'realm_id': '', 'restore_pool': 'default.rgw.log:restore'} 2026-04-01T02:24:57.934 INFO:tasks.util.rgw:rgwadmin: client.2 : ['user', 'list'] 2026-04-01T02:24:57.934 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.2', '--cluster', 'ceph', 'user', 'list'] 2026-04-01T02:24:57.934 DEBUG:teuthology.orchestra.run.vm08:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.2 --cluster ceph user list 2026-04-01T02:24:57.971 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:57.971 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:57.989 
INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.986+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.991 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.988+0000 7fa0c8344900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.991 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.988+0000 7fa0c8344900 20 realm 2026-04-01T02:24:57.991 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.988+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.991 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.989+0000 7fa0c8344900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.991 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.989+0000 7fa0c8344900 4 RGWPeriod::init failed to init realm id : (2) No such file or directory 2026-04-01T02:24:57.991 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.989+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.992 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.989+0000 7fa0c8344900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:57.992 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.989+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.992 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.990+0000 7fa0c8344900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:24:57.993 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.990+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:57.993 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.991+0000 7fa0c8344900 20 rados_obj.operate() r=0 bl.length=1190 2026-04-01T02:24:57.993 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:57.991+0000 7fa0c8344900 20 searching for the correct realm 2026-04-01T02:24:58.004 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 RGWRados::pool_iterate: got zone_info.9872c6d3-13db-4797-98fb-df48c9391eb9 2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 RGWRados::pool_iterate: got default.zonegroup. 2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 RGWRados::pool_iterate: got zonegroup_info.13fe02e2-6f52-4a18-a50f-ad9150d4f62b 2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 RGWRados::pool_iterate: got default.zone. 
2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 RGWRados::pool_iterate: got zone_names.default 2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 RGWRados::pool_iterate: got zonegroups_names.default 2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:58.005 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.002+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados_obj.operate() r=0 bl.length=470 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 zone default found 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 4 Realm: () 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 4 ZoneGroup: default (13fe02e2-6f52-4a18-a50f-ad9150d4f62b) 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 4 Zone: default (9872c6d3-13db-4797-98fb-df48c9391eb9) 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 10 cannot find current period zonegroup using local zonegroup configuration 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 zonegroup default 2026-04-01T02:24:58.006 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:58.007 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:58.007 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:58.007 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:58.007 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.004+0000 7fa0c8344900 20 rados->read ofs=0 len=0 2026-04-01T02:24:58.008 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.005+0000 7fa0c8344900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:24:58.008 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.005+0000 7fa0c8344900 20 started sync module instance, tier type = 2026-04-01T02:24:58.008 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.005+0000 7fa0c8344900 20 started zone id=9872c6d3-13db-4797-98fb-df48c9391eb9 (name=default) with tier type = 2026-04-01T02:24:58.011 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.008+0000 7fa0c8344900 20 add_watcher() i=0 2026-04-01T02:24:58.011 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.008+0000 7fa0c8344900 20 add_watcher() i=3 2026-04-01T02:24:58.011 
INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.008+0000 7fa0c8344900 20 add_watcher() i=5 2026-04-01T02:24:58.012 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.009+0000 7fa0c8344900 20 add_watcher() i=2 2026-04-01T02:24:58.012 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.009+0000 7fa0c8344900 20 add_watcher() i=7 2026-04-01T02:24:58.012 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.009+0000 7fa0c8344900 20 add_watcher() i=4 2026-04-01T02:24:58.012 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.009+0000 7fa0c8344900 20 add_watcher() i=6 2026-04-01T02:24:58.012 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.010+0000 7fa0c8344900 20 add_watcher() i=1 2026-04-01T02:24:58.012 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.010+0000 7fa0c8344900 2 all 8 watchers are set, enabling cache 2026-04-01T02:24:58.015 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.013+0000 7fa0c8344900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:58.015 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.013+0000 7fa0c8344900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:58.015 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.013+0000 7fa0c8344900 5 note: GC not initialized 2026-04-01T02:24:58.015 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.013+0000 7fa070ff1640 20 reqs_thread_entry: start 2026-04-01T02:24:58.060 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.058+0000 7fa0c8344900 20 init_complete bucket index max shards: 11 2026-04-01T02:24:58.060 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.058+0000 7fa0c8344900 20 Filter name: none 2026-04-01T02:24:58.061 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.058+0000 7fa06a7fc640 20 reqs_thread_entry: start 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.068+0000 7fa0c8344900 20 remove_watcher() i=0 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.068+0000 7fa0c8344900 2 removed watcher, disabling cache 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.068+0000 7fa0c8344900 20 remove_watcher() i=6 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.068+0000 7fa0c8344900 20 remove_watcher() i=3 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.068+0000 7fa0c8344900 20 remove_watcher() i=2 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.069+0000 7fa0c8344900 20 remove_watcher() i=7 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.069+0000 7fa0c8344900 20 remove_watcher() i=4 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.069+0000 7fa0c8344900 20 remove_watcher() i=1 2026-04-01T02:24:58.071 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.069+0000 7fa0c8344900 20 remove_watcher() i=5 2026-04-01T02:24:58.078 INFO:teuthology.orchestra.run.vm08.stdout:[] 2026-04-01T02:24:58.078 DEBUG:tasks.util.rgw: json result: [] 2026-04-01T02:24:58.078 INFO:tasks.rgw:Configuring storage class = FROZEN 2026-04-01T02:24:58.078 INFO:tasks.util.rgw:rgwadmin: client.2 : ['zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN'] 2026-04-01T02:24:58.078 
DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.2', '--cluster', 'ceph', 'zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN'] 2026-04-01T02:24:58.078 DEBUG:teuthology.orchestra.run.vm08:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.2 --cluster ceph zonegroup placement add --rgw-zone default --placement-id default-placement --storage-class FROZEN 2026-04-01T02:24:58.159 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:58.159 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:58.175 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.172+0000 7fd6da175900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:58.175 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.172+0000 7fd6da175900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:58.175 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.172+0000 7fd684ff1640 20 reqs_thread_entry: start 2026-04-01T02:24:58.185 INFO:teuthology.orchestra.run.vm08.stdout:[{"key":"default-placement","val":{"name":"default-placement","tags":[],"storage_classes":["FROZEN","LUKEWARM","STANDARD"]}}] 2026-04-01T02:24:58.185 DEBUG:tasks.util.rgw: json result: [{'key': 'default-placement', 'val': {'name': 'default-placement', 'tags': [], 'storage_classes': ['FROZEN', 'LUKEWARM', 'STANDARD']}}] 2026-04-01T02:24:58.185 INFO:tasks.util.rgw:rgwadmin: client.2 : ['zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN', '--data-pool', 'default.rgw.buckets.data.frozen'] 2026-04-01T02:24:58.185 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.2', '--cluster', 'ceph', 'zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'FROZEN', '--data-pool', 'default.rgw.buckets.data.frozen'] 2026-04-01T02:24:58.185 DEBUG:teuthology.orchestra.run.vm08:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.2 --cluster ceph zone placement add --rgw-zone default --placement-id default-placement --storage-class FROZEN --data-pool default.rgw.buckets.data.frozen 2026-04-01T02:24:58.266 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:58.266 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:58.279 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.276+0000 7ff6838f9900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:58.279 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.276+0000 7ff6838f9900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:58.279 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.277+0000 7ff62efed640 20 reqs_thread_entry: start 2026-04-01T02:24:58.290 
INFO:teuthology.orchestra.run.vm08.stdout:{"id":"9872c6d3-13db-4797-98fb-df48c9391eb9","name":"default","domain_root":"default.rgw.meta:root","control_pool":"default.rgw.control","dedup_pool":"default.rgw.dedup","gc_pool":"default.rgw.log:gc","lc_pool":"default.rgw.log:lc","log_pool":"default.rgw.log","intent_log_pool":"default.rgw.log:intent","usage_log_pool":"default.rgw.log:usage","roles_pool":"default.rgw.meta:roles","reshard_pool":"default.rgw.log:reshard","user_keys_pool":"default.rgw.meta:users.keys","user_email_pool":"default.rgw.meta:users.email","user_swift_pool":"default.rgw.meta:users.swift","user_uid_pool":"default.rgw.meta:users.uid","otp_pool":"default.rgw.otp","notif_pool":"default.rgw.log:notif","topics_pool":"default.rgw.meta:topics","account_pool":"default.rgw.meta:accounts","group_pool":"default.rgw.meta:groups","system_key":{"access_key":"","secret_key":""},"placement_pools":[{"key":"default-placement","val":{"index_pool":"default.rgw.buckets.index","storage_classes":{"FROZEN":{"data_pool":"default.rgw.buckets.data.frozen"},"LUKEWARM":{"data_pool":"default.rgw.buckets.data.lukewarm"},"STANDARD":{"data_pool":"default.rgw.buckets.data"}},"data_extra_pool":"default.rgw.buckets.non-ec","index_type":0,"inline_data":true}}],"realm_id":"","restore_pool":"default.rgw.log:restore"} 2026-04-01T02:24:58.290 DEBUG:tasks.util.rgw: json result: {'id': '9872c6d3-13db-4797-98fb-df48c9391eb9', 'name': 'default', 'domain_root': 'default.rgw.meta:root', 'control_pool': 'default.rgw.control', 'dedup_pool': 'default.rgw.dedup', 'gc_pool': 'default.rgw.log:gc', 'lc_pool': 'default.rgw.log:lc', 'log_pool': 'default.rgw.log', 'intent_log_pool': 'default.rgw.log:intent', 'usage_log_pool': 'default.rgw.log:usage', 'roles_pool': 'default.rgw.meta:roles', 'reshard_pool': 'default.rgw.log:reshard', 'user_keys_pool': 'default.rgw.meta:users.keys', 'user_email_pool': 'default.rgw.meta:users.email', 'user_swift_pool': 'default.rgw.meta:users.swift', 'user_uid_pool': 'default.rgw.meta:users.uid', 'otp_pool': 'default.rgw.otp', 'notif_pool': 'default.rgw.log:notif', 'topics_pool': 'default.rgw.meta:topics', 'account_pool': 'default.rgw.meta:accounts', 'group_pool': 'default.rgw.meta:groups', 'system_key': {'access_key': '', 'secret_key': ''}, 'placement_pools': [{'key': 'default-placement', 'val': {'index_pool': 'default.rgw.buckets.index', 'storage_classes': {'FROZEN': {'data_pool': 'default.rgw.buckets.data.frozen'}, 'LUKEWARM': {'data_pool': 'default.rgw.buckets.data.lukewarm'}, 'STANDARD': {'data_pool': 'default.rgw.buckets.data'}}, 'data_extra_pool': 'default.rgw.buckets.non-ec', 'index_type': 0, 'inline_data': True}}], 'realm_id': '', 'restore_pool': 'default.rgw.log:restore'} 2026-04-01T02:24:58.290 INFO:tasks.rgw:Configuring storage class = LUKEWARM 2026-04-01T02:24:58.290 INFO:tasks.util.rgw:rgwadmin: client.2 : ['zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM'] 2026-04-01T02:24:58.290 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.2', '--cluster', 'ceph', 'zonegroup', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM'] 2026-04-01T02:24:58.290 DEBUG:teuthology.orchestra.run.vm08:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.2 --cluster ceph 
zonegroup placement add --rgw-zone default --placement-id default-placement --storage-class LUKEWARM 2026-04-01T02:24:58.367 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:58.367 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:58.380 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.377+0000 7fe42cdce900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:58.380 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.377+0000 7fe42cdce900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:58.381 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.377+0000 7fe3d57ea640 20 reqs_thread_entry: start 2026-04-01T02:24:58.389 INFO:teuthology.orchestra.run.vm08.stdout:[{"key":"default-placement","val":{"name":"default-placement","tags":[],"storage_classes":["FROZEN","LUKEWARM","STANDARD"]}}] 2026-04-01T02:24:58.389 DEBUG:tasks.util.rgw: json result: [{'key': 'default-placement', 'val': {'name': 'default-placement', 'tags': [], 'storage_classes': ['FROZEN', 'LUKEWARM', 'STANDARD']}}] 2026-04-01T02:24:58.389 INFO:tasks.util.rgw:rgwadmin: client.2 : ['zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM', '--data-pool', 'default.rgw.buckets.data.lukewarm'] 2026-04-01T02:24:58.389 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.2', '--cluster', 'ceph', 'zone', 'placement', 'add', '--rgw-zone', 'default', '--placement-id', 'default-placement', '--storage-class', 'LUKEWARM', '--data-pool', 'default.rgw.buckets.data.lukewarm'] 2026-04-01T02:24:58.389 DEBUG:teuthology.orchestra.run.vm08:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.2 --cluster ceph zone placement add --rgw-zone default --placement-id default-placement --storage-class LUKEWARM --data-pool default.rgw.buckets.data.lukewarm 2026-04-01T02:24:58.471 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:24:58.472 INFO:teuthology.orchestra.run.vm08.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:24:58.485 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.482+0000 7f52c0b52900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:24:58.485 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.482+0000 7f52c0b52900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:24:58.486 INFO:teuthology.orchestra.run.vm08.stderr:2026-04-01T02:24:58.483+0000 7f52677fe640 20 reqs_thread_entry: start 2026-04-01T02:24:58.496 
INFO:teuthology.orchestra.run.vm08.stdout:{"id":"9872c6d3-13db-4797-98fb-df48c9391eb9","name":"default","domain_root":"default.rgw.meta:root","control_pool":"default.rgw.control","dedup_pool":"default.rgw.dedup","gc_pool":"default.rgw.log:gc","lc_pool":"default.rgw.log:lc","log_pool":"default.rgw.log","intent_log_pool":"default.rgw.log:intent","usage_log_pool":"default.rgw.log:usage","roles_pool":"default.rgw.meta:roles","reshard_pool":"default.rgw.log:reshard","user_keys_pool":"default.rgw.meta:users.keys","user_email_pool":"default.rgw.meta:users.email","user_swift_pool":"default.rgw.meta:users.swift","user_uid_pool":"default.rgw.meta:users.uid","otp_pool":"default.rgw.otp","notif_pool":"default.rgw.log:notif","topics_pool":"default.rgw.meta:topics","account_pool":"default.rgw.meta:accounts","group_pool":"default.rgw.meta:groups","system_key":{"access_key":"","secret_key":""},"placement_pools":[{"key":"default-placement","val":{"index_pool":"default.rgw.buckets.index","storage_classes":{"FROZEN":{"data_pool":"default.rgw.buckets.data.frozen"},"LUKEWARM":{"data_pool":"default.rgw.buckets.data.lukewarm"},"STANDARD":{"data_pool":"default.rgw.buckets.data"}},"data_extra_pool":"default.rgw.buckets.non-ec","index_type":0,"inline_data":true}}],"realm_id":"","restore_pool":"default.rgw.log:restore"} 2026-04-01T02:24:58.497 DEBUG:tasks.util.rgw: json result: {'id': '9872c6d3-13db-4797-98fb-df48c9391eb9', 'name': 'default', 'domain_root': 'default.rgw.meta:root', 'control_pool': 'default.rgw.control', 'dedup_pool': 'default.rgw.dedup', 'gc_pool': 'default.rgw.log:gc', 'lc_pool': 'default.rgw.log:lc', 'log_pool': 'default.rgw.log', 'intent_log_pool': 'default.rgw.log:intent', 'usage_log_pool': 'default.rgw.log:usage', 'roles_pool': 'default.rgw.meta:roles', 'reshard_pool': 'default.rgw.log:reshard', 'user_keys_pool': 'default.rgw.meta:users.keys', 'user_email_pool': 'default.rgw.meta:users.email', 'user_swift_pool': 'default.rgw.meta:users.swift', 'user_uid_pool': 'default.rgw.meta:users.uid', 'otp_pool': 'default.rgw.otp', 'notif_pool': 'default.rgw.log:notif', 'topics_pool': 'default.rgw.meta:topics', 'account_pool': 'default.rgw.meta:accounts', 'group_pool': 'default.rgw.meta:groups', 'system_key': {'access_key': '', 'secret_key': ''}, 'placement_pools': [{'key': 'default-placement', 'val': {'index_pool': 'default.rgw.buckets.index', 'storage_classes': {'FROZEN': {'data_pool': 'default.rgw.buckets.data.frozen'}, 'LUKEWARM': {'data_pool': 'default.rgw.buckets.data.lukewarm'}, 'STANDARD': {'data_pool': 'default.rgw.buckets.data'}}, 'data_extra_pool': 'default.rgw.buckets.non-ec', 'index_type': 0, 'inline_data': True}}], 'realm_id': '', 'restore_pool': 'default.rgw.log:restore'} 2026-04-01T02:24:58.497 INFO:tasks.rgw:Starting rgw... 
2026-04-01T02:24:58.497 INFO:tasks.rgw:rgw client.0 config is {} 2026-04-01T02:24:58.497 INFO:tasks.rgw:Using beast as radosgw frontend 2026-04-01T02:24:58.497 DEBUG:teuthology.orchestra.run.vm03:> sudo echo -n http://vm03.local:80 | sudo tee /home/ubuntu/cephtest/url_file 2026-04-01T02:24:58.526 INFO:teuthology.orchestra.run.vm03.stdout:http://vm03.local:80 2026-04-01T02:24:58.526 DEBUG:teuthology.orchestra.run.vm03:> sudo chown ceph /home/ubuntu/cephtest/url_file 2026-04-01T02:24:58.590 INFO:tasks.rgw.client.0:Restarting daemon 2026-04-01T02:24:58.590 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper term radosgw --rgw-frontends 'beast port=80' -n client.0 --cluster ceph -k /etc/ceph/ceph.client.0.keyring --log-file /var/log/ceph/rgw.ceph.client.0.log --rgw_ops_log_socket_path /home/ubuntu/cephtest/rgw.opslog.ceph.client.0.sock --foreground | sudo tee /var/log/ceph/rgw.ceph.client.0.stdout 2>&1 2026-04-01T02:24:58.632 INFO:tasks.rgw.client.0:Started 2026-04-01T02:24:58.632 INFO:tasks.rgw:rgw client.1 config is {} 2026-04-01T02:24:58.632 INFO:tasks.rgw:Using beast as radosgw frontend 2026-04-01T02:24:58.632 DEBUG:teuthology.orchestra.run.vm06:> sudo echo -n http://vm06.local:80 | sudo tee /home/ubuntu/cephtest/url_file 2026-04-01T02:24:58.658 INFO:teuthology.orchestra.run.vm06.stdout:http://vm06.local:80 2026-04-01T02:24:58.658 DEBUG:teuthology.orchestra.run.vm06:> sudo chown ceph /home/ubuntu/cephtest/url_file 2026-04-01T02:24:58.724 INFO:tasks.rgw.client.1:Restarting daemon 2026-04-01T02:24:58.724 DEBUG:teuthology.orchestra.run.vm06:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper term radosgw --rgw-frontends 'beast port=80' -n client.1 --cluster ceph -k /etc/ceph/ceph.client.1.keyring --log-file /var/log/ceph/rgw.ceph.client.1.log --rgw_ops_log_socket_path /home/ubuntu/cephtest/rgw.opslog.ceph.client.1.sock --foreground | sudo tee /var/log/ceph/rgw.ceph.client.1.stdout 2>&1 2026-04-01T02:24:58.765 INFO:tasks.rgw.client.1:Started 2026-04-01T02:24:58.766 INFO:tasks.rgw:rgw client.2 config is {} 2026-04-01T02:24:58.766 INFO:tasks.rgw:Using beast as radosgw frontend 2026-04-01T02:24:58.766 DEBUG:teuthology.orchestra.run.vm08:> sudo echo -n http://vm08.local:80 | sudo tee /home/ubuntu/cephtest/url_file 2026-04-01T02:24:58.792 INFO:teuthology.orchestra.run.vm08.stdout:http://vm08.local:80 2026-04-01T02:24:58.792 DEBUG:teuthology.orchestra.run.vm08:> sudo chown ceph /home/ubuntu/cephtest/url_file 2026-04-01T02:24:58.860 INFO:tasks.rgw.client.2:Restarting daemon 2026-04-01T02:24:58.860 DEBUG:teuthology.orchestra.run.vm08:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper term radosgw --rgw-frontends 'beast port=80' -n client.2 --cluster ceph -k /etc/ceph/ceph.client.2.keyring --log-file /var/log/ceph/rgw.ceph.client.2.log --rgw_ops_log_socket_path /home/ubuntu/cephtest/rgw.opslog.ceph.client.2.sock --foreground | sudo tee /var/log/ceph/rgw.ceph.client.2.stdout 2>&1 2026-04-01T02:24:58.902 INFO:tasks.rgw.client.2:Started 2026-04-01T02:24:58.902 INFO:tasks.rgw:Polling client.0 until it starts accepting connections on http://vm03.local:80/ 2026-04-01T02:24:58.902 DEBUG:teuthology.orchestra.run.vm03:> curl http://vm03.local:80/ 2026-04-01T02:24:58.933 INFO:teuthology.orchestra.run.vm03.stderr: % Total % Received % Xferd Average Speed Time Time Time Current 2026-04-01T02:24:58.933 INFO:teuthology.orchestra.run.vm03.stderr: Dload Upload Total Spent 
Left Speed 2026-04-01T02:24:58.936 INFO:teuthology.orchestra.run.vm03.stderr: 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 187 0 187 0 0 62333 0 --:--:-- --:--:-- --:--:-- 62333 2026-04-01T02:24:58.937 INFO:teuthology.orchestra.run.vm03.stdout:anonymous 2026-04-01T02:24:58.937 INFO:tasks.rgw:Polling client.1 until it starts accepting connections on http://vm06.local:80/ 2026-04-01T02:24:58.937 DEBUG:teuthology.orchestra.run.vm06:> curl http://vm06.local:80/ 2026-04-01T02:24:58.969 INFO:teuthology.orchestra.run.vm06.stderr: % Total % Received % Xferd Average Speed Time Time Time Current 2026-04-01T02:24:58.969 INFO:teuthology.orchestra.run.vm06.stderr: Dload Upload Total Spent Left Speed 2026-04-01T02:24:58.970 INFO:teuthology.orchestra.run.vm06.stderr: 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 2026-04-01T02:24:58.970 INFO:teuthology.orchestra.run.vm06.stderr:curl: (7) Failed to connect to vm06.local port 80: Connection refused 2026-04-01T02:24:58.971 DEBUG:teuthology.orchestra.run:got remote process result: 7 2026-04-01T02:24:59.972 DEBUG:teuthology.orchestra.run.vm06:> curl http://vm06.local:80/ 2026-04-01T02:24:59.997 INFO:teuthology.orchestra.run.vm06.stderr: % Total % Received % Xferd Average Speed Time Time Time Current 2026-04-01T02:24:59.997 INFO:teuthology.orchestra.run.vm06.stderr: Dload Upload Total Spent Left Speed 2026-04-01T02:24:59.999 INFO:teuthology.orchestra.run.vm06.stderr: 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 187 0 187 0 0 182k 0 --:--:-- --:--:-- --:--:-- 182k 2026-04-01T02:25:00.000 INFO:teuthology.orchestra.run.vm06.stdout:anonymous 2026-04-01T02:25:00.000 INFO:tasks.rgw:Polling client.2 until it starts accepting connections on http://vm08.local:80/ 2026-04-01T02:25:00.000 DEBUG:teuthology.orchestra.run.vm08:> curl http://vm08.local:80/ 2026-04-01T02:25:00.018 INFO:teuthology.orchestra.run.vm08.stderr: % Total % Received % Xferd Average Speed Time Time Time Current 2026-04-01T02:25:00.018 INFO:teuthology.orchestra.run.vm08.stderr: Dload Upload Total Spent Left Speed 2026-04-01T02:25:00.020 INFO:teuthology.orchestra.run.vm08.stderr: 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 187 0 187 0 0 182k 0 --:--:-- --:--:-- --:--:-- 182k 2026-04-01T02:25:00.020 INFO:teuthology.orchestra.run.vm08.stdout:anonymous 2026-04-01T02:25:00.020 INFO:teuthology.run_tasks:Running task tox... 2026-04-01T02:25:00.023 INFO:tasks.tox:Deploying tox from pip... 
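[editor's note] Each radosgw is launched in the foreground under daemon-helper and then polled with curl until the root URL answers; the first probe against client.1 is refused and succeeds on the retry a second later. A hedged Python sketch of that wait loop, where the endpoint and the one-second retry interval are assumptions based on the log:

    # Rough equivalent of the readiness poll: keep hitting the gateway's root URL
    # until it answers instead of refusing the connection.
    import time
    import urllib.error
    import urllib.request

    def wait_for_rgw(endpoint="http://vm03.local:80/", timeout=60):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            try:
                with urllib.request.urlopen(endpoint, timeout=5) as resp:
                    # An anonymous ListAllMyBuckets response means beast is serving requests.
                    return resp.read()
            except (urllib.error.URLError, ConnectionError):
                time.sleep(1)  # connection refused: daemon not listening yet
        raise RuntimeError(f"rgw at {endpoint} did not come up within {timeout}s")
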
2026-04-01T02:25:00.023 DEBUG:teuthology.orchestra.run.vm03:> python3 -m venv /home/ubuntu/cephtest/tox-venv 2026-04-01T02:25:01.310 DEBUG:teuthology.orchestra.run.vm03:> source /home/ubuntu/cephtest/tox-venv/bin/activate && pip install tox 2026-04-01T02:25:01.631 INFO:teuthology.orchestra.run.vm03.stdout:Collecting tox 2026-04-01T02:25:01.661 INFO:teuthology.orchestra.run.vm03.stdout: Downloading tox-4.30.3-py3-none-any.whl (175 kB) 2026-04-01T02:25:01.736 INFO:teuthology.orchestra.run.vm03.stdout:Collecting chardet>=5.2 2026-04-01T02:25:01.745 INFO:teuthology.orchestra.run.vm03.stdout: Downloading chardet-5.2.0-py3-none-any.whl (199 kB) 2026-04-01T02:25:01.808 INFO:teuthology.orchestra.run.vm03.stdout:Collecting tomli>=2.2.1 2026-04-01T02:25:01.817 INFO:teuthology.orchestra.run.vm03.stdout: Downloading tomli-2.4.1-py3-none-any.whl (14 kB) 2026-04-01T02:25:01.851 INFO:teuthology.orchestra.run.vm03.stdout:Collecting cachetools>=6.1 2026-04-01T02:25:01.859 INFO:teuthology.orchestra.run.vm03.stdout: Downloading cachetools-6.2.6-py3-none-any.whl (11 kB) 2026-04-01T02:25:01.898 INFO:teuthology.orchestra.run.vm03.stdout:Collecting filelock>=3.18 2026-04-01T02:25:01.907 INFO:teuthology.orchestra.run.vm03.stdout: Downloading filelock-3.19.1-py3-none-any.whl (15 kB) 2026-04-01T02:25:01.940 INFO:teuthology.orchestra.run.vm03.stdout:Collecting packaging>=25 2026-04-01T02:25:01.948 INFO:teuthology.orchestra.run.vm03.stdout: Downloading packaging-26.0-py3-none-any.whl (74 kB) 2026-04-01T02:25:01.972 INFO:teuthology.orchestra.run.vm03.stdout:Collecting pyproject-api>=1.9.1 2026-04-01T02:25:01.981 INFO:teuthology.orchestra.run.vm03.stdout: Downloading pyproject_api-1.9.1-py3-none-any.whl (13 kB) 2026-04-01T02:25:02.012 INFO:teuthology.orchestra.run.vm03.stdout:Collecting typing-extensions>=4.14.1 2026-04-01T02:25:02.021 INFO:teuthology.orchestra.run.vm03.stdout: Downloading typing_extensions-4.15.0-py3-none-any.whl (44 kB) 2026-04-01T02:25:02.049 INFO:teuthology.orchestra.run.vm03.stdout:Collecting pluggy>=1.6 2026-04-01T02:25:02.057 INFO:teuthology.orchestra.run.vm03.stdout: Downloading pluggy-1.6.0-py3-none-any.whl (20 kB) 2026-04-01T02:25:02.083 INFO:teuthology.orchestra.run.vm03.stdout:Collecting colorama>=0.4.6 2026-04-01T02:25:02.092 INFO:teuthology.orchestra.run.vm03.stdout: Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB) 2026-04-01T02:25:02.182 INFO:teuthology.orchestra.run.vm03.stdout:Collecting virtualenv>=20.31.2 2026-04-01T02:25:02.190 INFO:teuthology.orchestra.run.vm03.stdout: Downloading virtualenv-21.2.0-py3-none-any.whl (5.8 MB) 2026-04-01T02:25:02.303 INFO:teuthology.orchestra.run.vm03.stdout:Collecting platformdirs>=4.3.8 2026-04-01T02:25:02.312 INFO:teuthology.orchestra.run.vm03.stdout: Downloading platformdirs-4.4.0-py3-none-any.whl (18 kB) 2026-04-01T02:25:02.358 INFO:teuthology.orchestra.run.vm03.stdout:Collecting python-discovery>=1 2026-04-01T02:25:02.366 INFO:teuthology.orchestra.run.vm03.stdout: Downloading python_discovery-1.2.1-py3-none-any.whl (31 kB) 2026-04-01T02:25:02.388 INFO:teuthology.orchestra.run.vm03.stdout:Collecting distlib<1,>=0.3.7 2026-04-01T02:25:02.397 INFO:teuthology.orchestra.run.vm03.stdout: Downloading distlib-0.4.0-py2.py3-none-any.whl (469 kB) 2026-04-01T02:25:02.471 INFO:teuthology.orchestra.run.vm03.stdout:Installing collected packages: platformdirs, filelock, typing-extensions, tomli, python-discovery, packaging, distlib, virtualenv, pyproject-api, pluggy, colorama, chardet, cachetools, tox 2026-04-01T02:25:02.836 
INFO:teuthology.orchestra.run.vm03.stdout:Successfully installed cachetools-6.2.6 chardet-5.2.0 colorama-0.4.6 distlib-0.4.0 filelock-3.19.1 packaging-26.0 platformdirs-4.4.0 pluggy-1.6.0 pyproject-api-1.9.1 python-discovery-1.2.1 tomli-2.4.1 tox-4.30.3 typing-extensions-4.15.0 virtualenv-21.2.0 2026-04-01T02:25:02.923 INFO:teuthology.orchestra.run.vm03.stderr:WARNING: You are using pip version 21.3.1; however, version 26.0.1 is available. 2026-04-01T02:25:02.923 INFO:teuthology.orchestra.run.vm03.stderr:You should consider upgrading via the '/home/ubuntu/cephtest/tox-venv/bin/python3 -m pip install --upgrade pip' command. 2026-04-01T02:25:02.960 INFO:teuthology.run_tasks:Running task tox... 2026-04-01T02:25:02.963 INFO:tasks.tox:Deploying tox from pip... 2026-04-01T02:25:02.963 DEBUG:teuthology.orchestra.run.vm03:> python3 -m venv /home/ubuntu/cephtest/tox-venv 2026-04-01T02:25:03.709 DEBUG:teuthology.orchestra.run.vm03:> source /home/ubuntu/cephtest/tox-venv/bin/activate && pip install tox 2026-04-01T02:25:03.870 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: tox in ./cephtest/tox-venv/lib/python3.9/site-packages (4.30.3) 2026-04-01T02:25:03.876 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: cachetools>=6.1 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (6.2.6) 2026-04-01T02:25:03.876 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: packaging>=25 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (26.0) 2026-04-01T02:25:03.876 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: filelock>=3.18 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (3.19.1) 2026-04-01T02:25:03.877 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: tomli>=2.2.1 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (2.4.1) 2026-04-01T02:25:03.877 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: typing-extensions>=4.14.1 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (4.15.0) 2026-04-01T02:25:03.878 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: platformdirs>=4.3.8 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (4.4.0) 2026-04-01T02:25:03.878 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: pluggy>=1.6 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (1.6.0) 2026-04-01T02:25:03.878 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: colorama>=0.4.6 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (0.4.6) 2026-04-01T02:25:03.878 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: pyproject-api>=1.9.1 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (1.9.1) 2026-04-01T02:25:03.878 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: virtualenv>=20.31.2 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (21.2.0) 2026-04-01T02:25:03.879 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: chardet>=5.2 in ./cephtest/tox-venv/lib/python3.9/site-packages (from tox) (5.2.0) 2026-04-01T02:25:03.905 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: distlib<1,>=0.3.7 in ./cephtest/tox-venv/lib/python3.9/site-packages (from virtualenv>=20.31.2->tox) (0.4.0) 2026-04-01T02:25:03.906 INFO:teuthology.orchestra.run.vm03.stdout:Requirement already satisfied: python-discovery>=1 in 
./cephtest/tox-venv/lib/python3.9/site-packages (from virtualenv>=20.31.2->tox) (1.2.1) 2026-04-01T02:25:03.923 INFO:teuthology.orchestra.run.vm03.stderr:WARNING: You are using pip version 21.3.1; however, version 26.0.1 is available. 2026-04-01T02:25:03.923 INFO:teuthology.orchestra.run.vm03.stderr:You should consider upgrading via the '/home/ubuntu/cephtest/tox-venv/bin/python3 -m pip install --upgrade pip' command. 2026-04-01T02:25:03.962 INFO:teuthology.run_tasks:Running task dedup-tests... 2026-04-01T02:25:03.966 DEBUG:tasks.dedup_tests:config is {'client.0': {'rgw_server': 'client.0'}} 2026-04-01T02:25:03.966 INFO:tasks.dedup_tests:Downloading dedup-tests... 2026-04-01T02:25:03.966 INFO:tasks.dedup_tests:Using branch tt-20.2.0-sse-s3-kmip-preview-not-for-production-1 from http://git.local/ceph.git for dedup tests 2026-04-01T02:25:03.966 DEBUG:teuthology.orchestra.run.vm03:> git clone -b tt-20.2.0-sse-s3-kmip-preview-not-for-production-1 http://git.local/ceph.git /home/ubuntu/cephtest/ceph 2026-04-01T02:25:03.988 INFO:teuthology.orchestra.run.vm03.stderr:Cloning into '/home/ubuntu/cephtest/ceph'... 2026-04-01T02:25:37.817 INFO:tasks.dedup_tests:Creating rgw user... 2026-04-01T02:25:37.817 DEBUG:tasks.dedup_tests:Creating user foo.client.0 on client.0 2026-04-01T02:25:37.817 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin -n client.0 user create --uid foo.client.0 --display-name 'Mr. foo.client.0' --access-key JHXGVEMVXQHFCMADVEHP --secret R8wCVX7jI8Fk1N0OWOk81B6lMQQ7bP0ufPWW0rFrfzDiwcIuKFsMMQ== --cluster ceph 2026-04-01T02:25:37.900 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:25:37.901 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:25:37.923 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.920+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.921+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.921+0000 7fb2fd161900 20 realm 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.921+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.921+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.921+0000 7fb2fd161900 4 RGWPeriod::init failed to init realm id : (2) No such file or directory 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.921+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.922+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:37.924 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.922+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.926 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.923+0000 7fb2fd161900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:25:37.926 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.923+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.926 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.924+0000 7fb2fd161900 20 rados_obj.operate() r=0 bl.length=1190 
2026-04-01T02:25:37.926 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.924+0000 7fb2fd161900 20 searching for the correct realm 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 RGWRados::pool_iterate: got zone_info.9872c6d3-13db-4797-98fb-df48c9391eb9 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 RGWRados::pool_iterate: got default.zonegroup. 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 RGWRados::pool_iterate: got zonegroup_info.13fe02e2-6f52-4a18-a50f-ad9150d4f62b 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 RGWRados::pool_iterate: got default.zone. 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 RGWRados::pool_iterate: got zone_names.default 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 RGWRados::pool_iterate: got zonegroups_names.default 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 rados_obj.operate() r=0 bl.length=46 2026-04-01T02:25:37.938 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.935+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 rados_obj.operate() r=0 bl.length=470 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 zone default found 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 4 Realm: () 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 4 ZoneGroup: default (13fe02e2-6f52-4a18-a50f-ad9150d4f62b) 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 4 Zone: default (9872c6d3-13db-4797-98fb-df48c9391eb9) 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 10 cannot find current period zonegroup using local zonegroup configuration 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 zonegroup default 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:37.939 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.936+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.937+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.937+0000 7fb2fd161900 20 started sync module instance, tier type = 2026-04-01T02:25:37.939 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.937+0000 7fb2fd161900 20 started zone id=9872c6d3-13db-4797-98fb-df48c9391eb9 (name=default) with tier type = 2026-04-01T02:25:37.944 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.941+0000 7fb2fd161900 20 add_watcher() i=0 2026-04-01T02:25:37.944 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.942+0000 7fb2fd161900 20 add_watcher() i=6 2026-04-01T02:25:37.944 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.942+0000 7fb2fd161900 20 add_watcher() i=3 2026-04-01T02:25:37.945 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.942+0000 7fb2fd161900 20 add_watcher() i=5 2026-04-01T02:25:37.947 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.944+0000 7fb2fd161900 20 add_watcher() i=7 2026-04-01T02:25:37.947 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.945+0000 7fb2fd161900 20 add_watcher() i=4 2026-04-01T02:25:37.947 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.945+0000 7fb2fd161900 20 add_watcher() i=2 2026-04-01T02:25:37.948 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.945+0000 7fb2fd161900 20 add_watcher() i=1 2026-04-01T02:25:37.948 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.945+0000 7fb2fd161900 2 all 8 watchers are set, enabling cache 2026-04-01T02:25:37.950 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.948+0000 7fb2fd161900 20 rgw_check_secure_mon_conn(): auth registy supported: methods=[2] modes=[2,1] 2026-04-01T02:25:37.950 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.948+0000 7fb2fd161900 20 rgw_check_secure_mon_conn(): mode 1 is insecure 2026-04-01T02:25:37.950 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.948+0000 7fb2fd161900 5 note: GC not initialized 2026-04-01T02:25:37.951 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:37.948+0000 7fb2a5feb640 20 reqs_thread_entry: start 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.004+0000 7fb2fd161900 20 init_complete bucket index max shards: 11 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.004+0000 7fb2fd161900 20 Filter name: none 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.004+0000 7fb29f7fe640 20 reqs_thread_entry: start 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.004+0000 7fb2fd161900 10 cache get: name=default.rgw.meta+users.uid+foo.client.0 : miss 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.004+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.004+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.004+0000 7fb2fd161900 10 cache put: name=default.rgw.meta+users.uid+foo.client.0 info.flags=0x0 2026-04-01T02:25:38.007 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 10 adding default.rgw.meta+users.uid+foo.client.0 to cache LRU end 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 10 cache get: name=default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP : miss 2026-04-01T02:25:38.007 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 20 rados->read ofs=0 len=0 2026-04-01T02:25:38.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 20 rados_obj.operate() r=-2 bl.length=0 2026-04-01T02:25:38.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 10 cache put: name=default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP info.flags=0x0 2026-04-01T02:25:38.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 10 adding default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP to cache LRU end 2026-04-01T02:25:38.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 10 cache get: name=default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP : hit (negative entry) 2026-04-01T02:25:38.008 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.005+0000 7fb2fd161900 10 cache get: name=default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP : hit (negative entry) 2026-04-01T02:25:38.010 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.007+0000 7fb2fd161900 10 cache put: name=default.rgw.meta+users.uid+foo.client.0 info.flags=0x17 2026-04-01T02:25:38.010 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.007+0000 7fb2fd161900 10 moving default.rgw.meta+users.uid+foo.client.0 to cache LRU end 2026-04-01T02:25:38.010 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.007+0000 7fb2fd161900 10 distributing notification oid=default.rgw.control:notify.0 cni=[op: 0, obj: default.rgw.meta:users.uid:foo.client.0, ofs0, ns] 2026-04-01T02:25:38.010 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.008+0000 7fb2cf7fe640 10 rgw watcher librados: RGWWatcher::handle_notify() notify_id 163208757248 cookie 94881967084688 notifier 54557 bl.length()=628 2026-04-01T02:25:38.010 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.008+0000 7fb2cf7fe640 10 rgw watcher librados: cache put: name=default.rgw.meta+users.uid+foo.client.0 info.flags=0x17 2026-04-01T02:25:38.011 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.008+0000 7fb2cf7fe640 10 rgw watcher librados: moving default.rgw.meta+users.uid+foo.client.0 to cache LRU end 2026-04-01T02:25:38.013 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.010+0000 7fb2fd161900 10 cache put: name=default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP info.flags=0x7 2026-04-01T02:25:38.013 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.010+0000 7fb2fd161900 10 moving default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP to cache LRU end 2026-04-01T02:25:38.013 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.010+0000 7fb2fd161900 10 distributing notification oid=default.rgw.control:notify.2 cni=[op: 0, obj: default.rgw.meta:users.keys:JHXGVEMVXQHFCMADVEHP, ofs0, ns] 2026-04-01T02:25:38.013 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.010+0000 7fb2cf7fe640 10 rgw watcher librados: RGWWatcher::handle_notify() notify_id 163208757248 cookie 94881967101392 notifier 54557 bl.length()=186 2026-04-01T02:25:38.013 
INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.010+0000 7fb2cf7fe640 10 rgw watcher librados: cache put: name=default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP info.flags=0x7 2026-04-01T02:25:38.013 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.010+0000 7fb2cf7fe640 10 rgw watcher librados: moving default.rgw.meta+users.keys+JHXGVEMVXQHFCMADVEHP to cache LRU end 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout:{ 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "user_id": "foo.client.0", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "display_name": "Mr. foo.client.0", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "email": "", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "suspended": 0, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "max_buckets": 1000, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "subusers": [], 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "keys": [ 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: { 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "user": "foo.client.0", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "access_key": "JHXGVEMVXQHFCMADVEHP", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "secret_key": "R8wCVX7jI8Fk1N0OWOk81B6lMQQ7bP0ufPWW0rFrfzDiwcIuKFsMMQ==", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "active": true, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "create_date": "2026-04-01T02:25:38.006989Z" 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: } 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: ], 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "swift_keys": [], 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "caps": [], 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "op_mask": "read, write, delete", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "default_placement": "", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "default_storage_class": "", 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "placement_tags": [], 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "bucket_quota": { 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "enabled": false, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "check_on_raw": false, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "max_size": -1, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "max_size_kb": 0, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "max_objects": -1 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: }, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "user_quota": { 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "enabled": false, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "check_on_raw": false, 2026-04-01T02:25:38.014 INFO:teuthology.orchestra.run.vm03.stdout: "max_size": -1, 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "max_size_kb": 0, 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "max_objects": -1 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: }, 2026-04-01T02:25:38.015 
INFO:teuthology.orchestra.run.vm03.stdout: "temp_url_keys": [], 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "type": "rgw", 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "mfa_ids": [], 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "account_id": "", 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "path": "/", 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "create_date": "2026-04-01T02:25:38.006960Z", 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "tags": [], 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: "group_ids": [] 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout:} 2026-04-01T02:25:38.015 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:25:38.018 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.015+0000 7fb2fd161900 20 remove_watcher() i=0 2026-04-01T02:25:38.018 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.015+0000 7fb2fd161900 2 removed watcher, disabling cache 2026-04-01T02:25:38.019 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.016+0000 7fb2fd161900 20 remove_watcher() i=5 2026-04-01T02:25:38.019 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.016+0000 7fb2fd161900 20 remove_watcher() i=4 2026-04-01T02:25:38.019 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.016+0000 7fb2fd161900 20 remove_watcher() i=3 2026-04-01T02:25:38.019 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.016+0000 7fb2fd161900 20 remove_watcher() i=2 2026-04-01T02:25:38.019 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.016+0000 7fb2fd161900 20 remove_watcher() i=1 2026-04-01T02:25:38.019 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.016+0000 7fb2fd161900 20 remove_watcher() i=6 2026-04-01T02:25:38.019 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:25:38.017+0000 7fb2fd161900 20 remove_watcher() i=7 2026-04-01T02:25:38.026 INFO:tasks.dedup_tests:Configuring dedup-tests... 2026-04-01T02:25:38.026 DEBUG:teuthology.orchestra.run.vm03:> set -ex 2026-04-01T02:25:38.026 DEBUG:teuthology.orchestra.run.vm03:> dd of=/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/deduptests.client.0.conf 2026-04-01T02:25:38.043 INFO:tasks.dedup_tests:Running dedup-tests... 
2026-04-01T02:25:38.043 DEBUG:teuthology.orchestra.run.vm03:dedup tests against rgw> source /home/ubuntu/cephtest/tox-venv/bin/activate && cd /home/ubuntu/cephtest/ceph/src/test/rgw/dedup/ && DEDUPTESTS_CONF=./deduptests.client.0.conf tox -- -v -m 'basic_test or request_test or example_test' 2026-04-01T02:25:38.438 INFO:teuthology.orchestra.run.vm03.stdout:py: install_deps> python -I -m pip install -r requirements.txt 2026-04-01T02:25:41.245 INFO:teuthology.orchestra.run.vm03.stdout:py: commands[0]> pytest -v -m 'basic_test or request_test or example_test' 2026-04-01T02:25:41.343 INFO:teuthology.orchestra.run.vm03.stdout:============================= test session starts ============================== 2026-04-01T02:25:41.343 INFO:teuthology.orchestra.run.vm03.stdout:platform linux -- Python 3.9.23, pytest-8.4.2, pluggy-1.6.0 -- /home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/bin/python 2026-04-01T02:25:41.343 INFO:teuthology.orchestra.run.vm03.stdout:cachedir: .tox/py/.pytest_cache 2026-04-01T02:25:41.343 INFO:teuthology.orchestra.run.vm03.stdout:rootdir: /home/ubuntu/cephtest/ceph/src/test/rgw/dedup 2026-04-01T02:25:41.343 INFO:teuthology.orchestra.run.vm03.stdout:configfile: pytest.ini 2026-04-01T02:25:41.447 INFO:teuthology.orchestra.run.vm03.stdout:collecting ... collected 34 items 2026-04-01T02:25:41.447 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:25:41.583 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_etag_corruption PASSED [ 2%] 2026-04-01T02:25:41.583 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_md5_collisions PASSED [ 5%] 2026-04-01T02:25:41.584 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_small PASSED [ 8%] 2026-04-01T02:25:41.584 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_small_with_tenants PASSED [ 11%] 2026-04-01T02:25:41.584 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_0_with_tenants PASSED [ 14%] 2026-04-01T02:25:41.585 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_0 PASSED [ 17%] 2026-04-01T02:25:41.585 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_1_with_tenants PASSED [ 20%] 2026-04-01T02:25:41.585 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_1 PASSED [ 23%] 2026-04-01T02:25:41.586 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_2_with_tenants PASSED [ 26%] 2026-04-01T02:25:41.586 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_2 PASSED [ 29%] 2026-04-01T02:25:41.586 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_with_remove_multi_tenants PASSED [ 32%] 2026-04-01T02:25:41.587 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_with_remove PASSED [ 35%] 2026-04-01T02:25:41.587 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_multipart_with_tenants PASSED [ 38%] 2026-04-01T02:25:41.587 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_multipart PASSED [ 41%] 2026-04-01T02:25:41.588 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_basic_with_tenants PASSED [ 44%] 2026-04-01T02:25:41.588 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_basic PASSED [ 47%] 2026-04-01T02:25:41.588 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_small_multipart_with_tenants PASSED [ 50%] 2026-04-01T02:25:41.589 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_small_multipart PASSED [ 
52%] 2026-04-01T02:25:41.589 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_large_scale_with_tenants PASSED [ 55%] 2026-04-01T02:25:41.589 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_large_scale PASSED [ 58%] 2026-04-01T02:25:41.590 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_empty_bucket PASSED [ 61%] 2026-04-01T02:25:41.590 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_inc_loop_with_tenants PASSED [ 64%] 2026-04-01T02:25:48.116 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small_with_tenants 2026-04-01T02:25:48.116 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:25:48.116 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:25:48.700 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 67%] 2026-04-01T02:28:36.141 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_multipart 2026-04-01T02:28:36.141 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:28:36.141 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:28:41.968 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 70%] 2026-04-01T02:28:50.886 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_basic 2026-04-01T02:28:50.886 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:28:50.886 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:28:51.521 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 73%] 2026-04-01T02:29:02.399 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small_multipart 2026-04-01T02:29:02.399 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:29:02.399 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:29:03.088 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 76%] 2026-04-01T02:29:09.228 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small 2026-04-01T02:29:09.228 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:29:09.228 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:29:09.742 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 79%] 2026-04-01T02:29:25.249 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small_large_mix 2026-04-01T02:29:25.249 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:29:25.249 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:29:26.438 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 82%] 2026-04-01T02:29:45.737 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_basic_with_tenants 2026-04-01T02:29:45.737 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call 
--------------------------------- 2026-04-01T02:29:45.737 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:29:46.890 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 85%] 2026-04-01T02:31:00.503 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_multipart_with_tenants 2026-04-01T02:31:00.503 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:31:00.503 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:31:02.925 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 88%] 2026-04-01T02:31:12.473 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small_multipart_with_tenants 2026-04-01T02:31:12.473 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:31:12.473 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:31:13.118 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 91%] 2026-04-01T02:36:24.295 INFO:tasks.ceph.mon.a.vm03.stderr:2026-04-01T02:36:24.292+0000 7f5ef4fb1640 -1 log_channel(cluster) log [ERR] : Health check failed: mon a is very low on available space (MON_DISK_CRIT) 2026-04-01T02:36:29.294 INFO:tasks.ceph.mon.a.vm03.stderr:2026-04-01T02:36:29.292+0000 7f5ef4fb1640 -1 log_channel(cluster) log [ERR] : Health check update: mons a,c are very low on available space (MON_DISK_CRIT) 2026-04-01T02:38:03.796 INFO:tasks.ceph.mon.a.vm03.stderr:2026-04-01T02:38:03.793+0000 7f5ef4fb1640 -1 log_channel(cluster) log [ERR] : Health check update: mons a,b,c are very low on available space (MON_DISK_CRIT) 2026-04-01T02:38:31.169 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_large_scale_with_tenants 2026-04-01T02:38:31.169 INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:38:31.169 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1096 dedup completed in 5 seconds 2026-04-01T02:38:31.169 INFO:teuthology.orchestra.run.vm03.stdout:INFO dedup.test_dedup:test_dedup.py:1288 [64] obj_count=65665, upload=420(sec), exec=5(sec), verify=0(sec) 2026-04-01T02:40:00.001 INFO:tasks.ceph.mon.a.vm03.stderr:2026-04-01T02:39:59.999+0000 7f5ef4fb1640 -1 log_channel(cluster) log [ERR] : overall HEALTH_ERR mons a,b,c are very low on available space 2026-04-01T02:40:21.026 INFO:teuthology.orchestra.run.vm03.stdout:PASSED [ 94%] 2026-04-01T02:40:49.028 INFO:tasks.ceph.osd.0.vm03.stderr:problem writing to /var/log/ceph/ceph-osd.0.log: (28) No space left on device 2026-04-01T02:40:49.028 INFO:tasks.ceph.osd.3.vm03.stderr:problem writing to /var/log/ceph/ceph-osd.3.log: (28) No space left on device 2026-04-01T02:40:49.030 INFO:tasks.ceph.osd.2.vm03.stderr:problem writing to /var/log/ceph/ceph-osd.2.log: (28) No space left on device 2026-04-01T02:40:49.030 INFO:tasks.ceph.osd.1.vm03.stderr:problem writing to /var/log/ceph/ceph-osd.1.log: (28) No space left on device 2026-04-01T02:40:49.031 INFO:tasks.ceph.osd.3.vm03.stderr:problem writing to /var/log/ceph/ceph-osd.3.log: (28) No space left on device 2026-04-01T02:40:49.031 INFO:tasks.ceph.osd.2.vm03.stderr:problem writing to /var/log/ceph/ceph-osd.2.log: (28) No space left on device 2026-04-01T02:40:49.031 
INFO:tasks.rgw.client.0.vm03.stdout:problem writing to /var/log/ceph/rgw.ceph.client.0.log: (28) No space left on device 2026-04-01T02:40:49.032 INFO:tasks.ceph.mgr.y.vm03.stderr:problem writing to /var/log/ceph/ceph-mgr.y.log: (28) No space left on device 2026-04-01T02:40:49.063 INFO:tasks.ceph.osd.3.vm03.stderr:problem writing to /var/log/ceph/ceph-osd.3.log: (28) No space left on device 2026-04-01T02:40:49.064 INFO:tasks.rgw.client.0.vm03.stdout:tee: /var/log/ceph/rgw.ceph.client.0.stdout: No space left on device 2026-04-01T02:40:49.065 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:40:49.070 INFO:tasks.ceph.mon.c.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.c.log: (28) No space left on device 2026-04-01T02:41:24.786 INFO:tasks.ceph.osd.6.vm06.stderr:problem writing to /var/log/ceph/ceph-osd.6.log: (28) No space left on device 2026-04-01T02:41:24.787 INFO:tasks.ceph.osd.5.vm06.stderr:problem writing to /var/log/ceph/ceph-osd.5.log: (28) No space left on device 2026-04-01T02:41:24.787 INFO:tasks.ceph.osd.4.vm06.stderr:problem writing to /var/log/ceph/ceph-osd.4.log: (28) No space left on device 2026-04-01T02:41:24.790 INFO:tasks.ceph.osd.7.vm06.stderr:problem writing to /var/log/ceph/ceph-osd.7.log: (28) No space left on device 2026-04-01T02:41:24.790 INFO:tasks.ceph.osd.5.vm06.stderr:problem writing to /var/log/ceph/ceph-osd.5.log: (28) No space left on device 2026-04-01T02:41:24.793 INFO:tasks.ceph.osd.7.vm06.stderr:problem writing to /var/log/ceph/ceph-osd.7.log: (28) No space left on device 2026-04-01T02:41:24.856 INFO:tasks.ceph.mon.b.vm06.stderr:problem writing to /var/log/ceph/ceph-mon.b.log: (28) No space left on device 2026-04-01T02:41:25.285 INFO:tasks.ceph.mgr.x.vm06.stderr:problem writing to /var/log/ceph/ceph-mgr.x.log: (28) No space left on device 2026-04-01T02:41:25.301 INFO:tasks.ceph.mon.a.vm03.stderr:2026-04-01T02:41:25.298+0000 7f5ef4fb1640 -1 rocksdb: submit_common error: IO error: No space left on device: While open a file for appending: /var/lib/ceph/mon/ceph-a/store.db/000022.log: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:25.301 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = '1158' value size = 611) 2026-04-01T02:41:25.301 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = 'pending_v' value size = 8) 2026-04-01T02:41:25.301 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = 'pending_pn' value size = 8) 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f5ef4fb1640 time 2026-04-01T02:41:25.299125+0000 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f5efad901fd] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 
3: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 4: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 6: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 7: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 9: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 10: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 11: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr:*** Caught signal (Aborted) ** 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: in thread 7f5ef4fb1640 thread_name:safe_timer 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f5ef9e3fc30] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f5ef9e8d02c] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 3: raise() 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 4: abort() 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f5efad902ba] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 7: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.302 INFO:tasks.ceph.mon.a.vm03.stderr: 9: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 10: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 11: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 12: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 13: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 14: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 15: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr:2026-04-01T02:41:25.300+0000 7f5ef4fb1640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f5ef4fb1640 time 2026-04-01T02:41:25.299125+0000 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: 
ceph_abort_msg("failed to write to db") 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f5efad901fd] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 3: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 4: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 6: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 7: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 9: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 10: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 11: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr:2026-04-01T02:41:25.301+0000 7f5ef4fb1640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: in thread 7f5ef4fb1640 thread_name:safe_timer 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f5ef9e3fc30] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f5ef9e8d02c] 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 3: raise() 2026-04-01T02:41:25.303 INFO:tasks.ceph.mon.a.vm03.stderr: 4: abort() 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f5efad902ba] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 7: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 9: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 10: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 11: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 12: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 13: 
/usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 14: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 15: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.304 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr: -2> 2026-04-01T02:41:25.298+0000 7f5ef4fb1640 -1 rocksdb: submit_common error: IO error: No space left on device: While open a file for appending: /var/lib/ceph/mon/ceph-a/store.db/000022.log: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = '1158' value size = 611) 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = 'pending_v' value size = 8) 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = 'pending_pn' value size = 8) 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr: -1> 2026-04-01T02:41:25.300+0000 7f5ef4fb1640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f5ef4fb1640 time 2026-04-01T02:41:25.299125+0000 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f5efad901fd] 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.324 INFO:tasks.ceph.mon.a.vm03.stderr: 3: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 4: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 6: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 7: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 9: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 10: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 11: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 0> 2026-04-01T02:41:25.301+0000 7f5ef4fb1640 -1 *** Caught signal 
(Aborted) ** 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: in thread 7f5ef4fb1640 thread_name:safe_timer 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f5ef9e3fc30] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f5ef9e8d02c] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 3: raise() 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 4: abort() 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f5efad902ba] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 7: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 9: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 10: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 11: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 12: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 13: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 14: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 15: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
2026-04-01T02:41:25.325 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.326 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.326 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.326 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.326 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.326 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.326 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.326 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.327 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.327 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.327 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.327 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.327 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.327 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.328 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to 
/var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.333 INFO:tasks.ceph.mon.a.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.a.log: (28) No space left on device 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: -9999> 2026-04-01T02:41:25.298+0000 7f5ef4fb1640 -1 rocksdb: submit_common error: IO error: No space left on device: While open a file for appending: /var/lib/ceph/mon/ceph-a/store.db/000022.log: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = '1158' value size = 611) 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = 'pending_v' value size = 8) 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr:PutCF( prefix = paxos key = 'pending_pn' value size = 8) 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: -9998> 2026-04-01T02:41:25.300+0000 7f5ef4fb1640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f5ef4fb1640 time 2026-04-01T02:41:25.299125+0000 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 
2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f5efad901fd] 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 3: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 4: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 6: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 7: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.335 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 9: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 10: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 11: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: -9997> 2026-04-01T02:41:25.301+0000 7f5ef4fb1640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: in thread 7f5ef4fb1640 thread_name:safe_timer 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f5ef9e3fc30] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f5ef9e8d02c] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 3: raise() 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 4: abort() 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f5efad902ba] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x555db4e8b1ac] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 7: (Paxos::begin(ceph::buffer::v15_2_0::list&)+0x54c) [0x555db500995c] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 8: (Paxos::propose_pending()+0x11b) [0x555db501770b] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 9: (Paxos::trigger_propose()+0x118) [0x555db5017b08] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 10: (PaxosService::propose_pending()+0x176) [0x555db5017e46] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 11: ceph-mon(+0x2a644d) [0x555db4e8b44d] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 12: (CommonSafeTimer::timer_thread()+0x130) [0x7f5efaedc550] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 13: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f5efaedcfb1] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 14: /lib64/libc.so.6(+0x8b2ea) [0x7f5ef9e8b2ea] 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 15: /lib64/libc.so.6(+0x1103c0) [0x7f5ef9f103c0] 
2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 2026-04-01T02:41:25.336 INFO:tasks.ceph.mon.a.vm03.stderr: 2026-04-01T02:41:25.481 INFO:tasks.ceph.mon.a.vm03.stderr:daemon-helper: command crashed with signal 6 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f889a998640 time 2026-04-01T02:41:26.747686+0000 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f88a07901fd] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 3: (Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr:*** Caught signal (Aborted) ** 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: in thread 7f889a998640 thread_name:safe_timer 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f889f83fc30] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f889f88d02c] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 3: raise() 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 4: abort() 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f88a07902ba] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 9: 
(Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 10: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr:2026-04-01T02:41:26.747+0000 7f889a998640 -1 rocksdb: submit_common error: IO error: No space left on device: While open a file for appending: /var/lib/ceph/mon/ceph-c/store.db/000022.log: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr:PutCF( prefix = monitor key = 'connectivity_scores' value size = 238) 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr:2026-04-01T02:41:26.747+0000 7f889a998640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f889a998640 time 2026-04-01T02:41:26.747686+0000 2026-04-01T02:41:26.750 INFO:tasks.ceph.mon.c.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f88a07901fd] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 3: (Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr:2026-04-01T02:41:26.748+0000 7f889a998640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: in thread 7f889a998640 thread_name:safe_timer 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) 
tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f889f83fc30] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f889f88d02c] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 3: raise() 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 4: abort() 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f88a07902ba] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 9: (Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 10: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.751 INFO:tasks.ceph.mon.c.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.c.log: (28) No space left on device 2026-04-01T02:41:26.777 INFO:tasks.ceph.mon.c.vm03.stderr: -2> 2026-04-01T02:41:26.747+0000 7f889a998640 -1 rocksdb: submit_common error: IO error: No space left on device: While open a file for appending: /var/lib/ceph/mon/ceph-c/store.db/000022.log: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:26.777 INFO:tasks.ceph.mon.c.vm03.stderr:PutCF( prefix = monitor key = 'connectivity_scores' value size = 238) 2026-04-01T02:41:26.777 INFO:tasks.ceph.mon.c.vm03.stderr: -1> 2026-04-01T02:41:26.747+0000 7f889a998640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f889a998640 time 2026-04-01T02:41:26.747686+0000 2026-04-01T02:41:26.777 INFO:tasks.ceph.mon.c.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f88a07901fd] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 3: 
(Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 0> 2026-04-01T02:41:26.748+0000 7f889a998640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: in thread 7f889a998640 thread_name:safe_timer 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f889f83fc30] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f889f88d02c] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 3: raise() 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 4: abort() 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f88a07902ba] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 9: (Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 10: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
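Both monitor aborts recorded above share one root cause: the node's filesystem filled up (apparently while the large-scale dedup tests were uploading tens of thousands of objects), RocksDB could no longer append to the mon store, and MonitorDBStore::apply_transaction() hit ceph_abort_msg("failed to write to db"): mon.a via the Paxos proposal path and mon.c while persisting connectivity scores. The MON_DISK_CRIT health errors at 02:36 flagged the shortage several minutes before the crashes. Purely as an illustration, a client-side watchdog that polls for that health check is sketched below; it assumes only a working ceph CLI with an admin keyring on the node and the usual JSON layout of "ceph health detail" with a top-level "checks" map, and the check name is taken from this log.

import json
import subprocess
import time

def mon_disk_crit_present() -> bool:
    # Query cluster health as JSON and look for the MON_DISK_CRIT check seen
    # in the log above; assumes the standard "checks" map in the output.
    out = subprocess.run(
        ["ceph", "health", "detail", "--format", "json"],
        check=True, capture_output=True, text=True,
    ).stdout
    return "MON_DISK_CRIT" in json.loads(out).get("checks", {})

while True:
    if mon_disk_crit_present():
        print("monitor(s) critically low on disk space; stop the workload")
        break
    time.sleep(30)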
2026-04-01T02:41:26.778 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.781 INFO:tasks.ceph.mon.c.vm03.stderr:problem writing to /var/log/ceph/ceph-mon.c.log: (28) No space left on device
[... identical "problem writing to /var/log/ceph/ceph-mon.c.log: (28) No space left on device" records repeated from 2026-04-01T02:41:26.781 through 2026-04-01T02:41:26.786 ...]
2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: -9999> 2026-04-01T02:41:26.747+0000 7f889a998640 -1 rocksdb: submit_common error: IO error: No space left on device: While open a file for appending: /var/lib/ceph/mon/ceph-c/store.db/000022.log: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr:PutCF( prefix = monitor key = 'connectivity_scores' value size = 238) 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: -9998> 2026-04-01T02:41:26.747+0000 7f889a998640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7f889a998640 time 2026-04-01T02:41:26.747686+0000 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7f88a07901fd] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 2: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 3: 
(Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: -9997> 2026-04-01T02:41:26.748+0000 7f889a998640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: in thread 7f889a998640 thread_name:safe_timer 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7f889f83fc30] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7f889f88d02c] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 3: raise() 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 4: abort() 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7f88a07902ba] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 6: ceph-mon(+0x2a61ac) [0x5581cee411ac] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 7: (Elector::persist_connectivity_scores()+0x135) [0x5581cef24865] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x5581cef2e0c1] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 9: (Elector::ping_check(int)+0x2e2) [0x5581cef29182] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 10: ceph-mon(+0x2a644d) [0x5581cee4144d] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7f88a08dc550] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7f88a08dcfb1] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7f889f88b2ea] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7f889f9103c0] 2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
2026-04-01T02:41:26.789 INFO:tasks.ceph.mon.c.vm03.stderr: 2026-04-01T02:41:26.899 INFO:tasks.ceph.mon.c.vm03.stderr:daemon-helper: command crashed with signal 6 2026-04-01T02:41:28.848 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~0s 2026-04-01T02:41:28.848 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~0s 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr:2026-04-01T02:41:34.481+0000 7fdb370e2640 -1 rocksdb: submit_common error: IO error: No space left on device: While appending to file: /var/lib/ceph/mon/ceph-b/store.db/000023.sst: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr:PutCF( prefix = monitor key = 'connectivity_scores' value size = 238) 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7fdb370e2640 time 2026-04-01T02:41:34.482919+0000 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7fdb3cf901fd] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 2: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 3: (Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 5: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr:*** Caught signal (Aborted) ** 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr: in thread 7fdb370e2640 thread_name:safe_timer 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr:2026-04-01T02:41:34.482+0000 7fdb370e2640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7fdb370e2640 time 2026-04-01T02:41:34.482919+0000 2026-04-01T02:41:34.484 INFO:tasks.ceph.mon.b.vm06.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.485 
INFO:tasks.ceph.mon.b.vm06.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7fdb3cf901fd] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 2: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 3: (Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 5: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7fdb3c03fc30] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7fdb3c08d02c] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 3: raise() 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 4: abort() 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7fdb3cf902ba] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 7: (Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 9: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 10: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr:2026-04-01T02:41:34.482+0000 7fdb370e2640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: in thread 7fdb370e2640 thread_name:safe_timer 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: ceph 
version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7fdb3c03fc30] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7fdb3c08d02c] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 3: raise() 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 4: abort() 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7fdb3cf902ba] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 7: (Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 9: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 10: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.485 INFO:tasks.ceph.mon.b.vm06.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 
2026-04-01T02:41:34.486 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.486 INFO:tasks.ceph.mon.b.vm06.stderr:problem writing to /var/log/ceph/ceph-mon.b.log: (28) No space left on device 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: -2> 2026-04-01T02:41:34.481+0000 7fdb370e2640 -1 rocksdb: submit_common error: IO error: No space left on device: While appending to file: /var/lib/ceph/mon/ceph-b/store.db/000023.sst: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr:PutCF( prefix = monitor key = 'connectivity_scores' value size = 238) 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: -1> 2026-04-01T02:41:34.482+0000 7fdb370e2640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7fdb370e2640 time 2026-04-01T02:41:34.482919+0000 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7fdb3cf901fd] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 2: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 3: (Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 5: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.494 INFO:tasks.ceph.mon.b.vm06.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 0> 2026-04-01T02:41:34.482+0000 7fdb370e2640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: in thread 7fdb370e2640 thread_name:safe_timer 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7fdb3c03fc30] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7fdb3c08d02c] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 3: raise() 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 4: abort() 2026-04-01T02:41:34.495 
INFO:tasks.ceph.mon.b.vm06.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7fdb3cf902ba] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 7: (Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 9: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 10: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 2026-04-01T02:41:34.495 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.498 INFO:tasks.ceph.mon.b.vm06.stderr:problem writing to /var/log/ceph/ceph-mon.b.log: (28) No space left on device
[... identical "problem writing to /var/log/ceph/ceph-mon.b.log: (28) No space left on device" records repeated from 2026-04-01T02:41:34.498 through 2026-04-01T02:41:34.500 ...]
2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: -9999> 2026-04-01T02:41:34.481+0000 7fdb370e2640 -1 rocksdb: submit_common error: IO error: No 
space left on device: While appending to file: /var/lib/ceph/mon/ceph-b/store.db/000023.sst: No space left on device code =  Rocksdb transaction: 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr:PutCF( prefix = monitor key = 'connectivity_scores' value size = 238) 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: -9998> 2026-04-01T02:41:34.482+0000 7fdb370e2640 -1 /runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: In function 'int MonitorDBStore::apply_transaction(TransactionRef)' thread 7fdb370e2640 time 2026-04-01T02:41:34.482919+0000 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr:/runner/scratch/rpms/ceph-debug/20.2.0-8-g0597158282e/BUILD/ceph-20.2.0-8-g0597158282e/src/mon/MonitorDBStore.h: 356: ceph_abort_msg("failed to write to db") 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 1: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0xc9) [0x7fdb3cf901fd] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 2: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 3: (Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 4: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 5: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 7: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 8: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 9: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 10: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: -9997> 2026-04-01T02:41:34.482+0000 7fdb370e2640 -1 *** Caught signal (Aborted) ** 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: in thread 7fdb370e2640 thread_name:safe_timer 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: ceph version 20.2.0-8-g0597158282e (0597158282e6d69429e60df2354a6c8eed0e5bce) tentacle (stable - RelWithDebInfo) 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 1: /lib64/libc.so.6(+0x3fc30) [0x7fdb3c03fc30] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 2: /lib64/libc.so.6(+0x8d02c) [0x7fdb3c08d02c] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 3: raise() 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 4: abort() 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 5: (ceph::__ceph_abort(char const*, int, char const*, std::__cxx11::basic_string, std::allocator > const&)+0x186) [0x7fdb3cf902ba] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 6: ceph-mon(+0x2a61ac) [0x564171eaf1ac] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 7: 
(Elector::persist_connectivity_scores()+0x135) [0x564171f92865] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 8: (ConnectionTracker::report_dead_connection(int, double)+0x181) [0x564171f9c0c1] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 9: (Elector::dead_ping(int)+0x1a1) [0x564171f93b91] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 10: ceph-mon(+0x2a644d) [0x564171eaf44d] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 11: (CommonSafeTimer::timer_thread()+0x130) [0x7fdb3d0dc550] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 12: /usr/lib64/ceph/libceph-common.so.2(+0x2dcfb1) [0x7fdb3d0dcfb1] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 13: /lib64/libc.so.6(+0x8b2ea) [0x7fdb3c08b2ea] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 14: /lib64/libc.so.6(+0x1103c0) [0x7fdb3c1103c0] 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: NOTE: a copy of the executable, or `objdump -rdS ` is needed to interpret this. 2026-04-01T02:41:34.500 INFO:tasks.ceph.mon.b.vm06.stderr: 2026-04-01T02:41:34.620 INFO:tasks.ceph.mon.b.vm06.stderr:daemon-helper: command crashed with signal 6 2026-04-01T02:41:35.156 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~6s 2026-04-01T02:41:35.157 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~6s 2026-04-01T02:41:35.157 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~0s 2026-04-01T02:41:41.469 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~13s 2026-04-01T02:41:41.470 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~13s 2026-04-01T02:41:41.470 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~6s 2026-04-01T02:41:47.777 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~19s 2026-04-01T02:41:47.778 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~19s 2026-04-01T02:41:47.778 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~13s 2026-04-01T02:41:54.083 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~25s 2026-04-01T02:41:54.083 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~25s 2026-04-01T02:41:54.083 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~19s 2026-04-01T02:41:59.253 INFO:tasks.rgw.client.1.vm06.stdout:problem writing to /var/log/ceph/rgw.ceph.client.1.log: tee: /var/log/ceph/rgw.ceph.client.1.stdout: No space left on device 2026-04-01T02:41:59.253 INFO:tasks.rgw.client.1.vm06.stdout:(28) No space left on device 2026-04-01T02:42:00.391 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~32s 2026-04-01T02:42:00.392 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~32s 2026-04-01T02:42:00.392 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~25s 2026-04-01T02:42:06.699 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~38s 2026-04-01T02:42:06.699 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~38s 2026-04-01T02:42:06.699 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~32s 2026-04-01T02:42:13.006 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~44s 2026-04-01T02:42:13.006 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~44s 2026-04-01T02:42:13.006 
INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~38s 2026-04-01T02:42:19.313 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~51s 2026-04-01T02:42:19.313 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~51s 2026-04-01T02:42:19.313 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~44s 2026-04-01T02:42:25.621 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~57s 2026-04-01T02:42:25.621 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~57s 2026-04-01T02:42:25.621 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~50s 2026-04-01T02:42:31.926 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~63s 2026-04-01T02:42:31.927 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~63s 2026-04-01T02:42:31.927 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~57s 2026-04-01T02:42:38.232 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~69s 2026-04-01T02:42:38.233 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~69s 2026-04-01T02:42:38.233 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~63s 2026-04-01T02:42:44.544 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~76s 2026-04-01T02:42:44.544 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~76s 2026-04-01T02:42:44.544 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~69s 2026-04-01T02:42:50.850 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~82s 2026-04-01T02:42:50.850 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~82s 2026-04-01T02:42:50.850 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~76s 2026-04-01T02:42:57.157 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~88s 2026-04-01T02:42:57.157 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~88s 2026-04-01T02:42:57.157 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~82s 2026-04-01T02:43:03.464 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~95s 2026-04-01T02:43:03.464 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~95s 2026-04-01T02:43:03.464 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~88s 2026-04-01T02:43:09.770 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~101s 2026-04-01T02:43:09.770 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~101s 2026-04-01T02:43:09.770 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~95s 2026-04-01T02:43:16.076 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~107s 2026-04-01T02:43:16.076 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~107s 2026-04-01T02:43:16.076 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~101s 2026-04-01T02:43:22.383 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~114s 2026-04-01T02:43:22.383 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~114s 2026-04-01T02:43:22.383 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~107s 2026-04-01T02:43:28.689 
INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~120s 2026-04-01T02:43:28.689 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~120s 2026-04-01T02:43:28.689 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~114s 2026-04-01T02:43:34.995 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~126s 2026-04-01T02:43:34.995 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~126s 2026-04-01T02:43:34.995 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~120s 2026-04-01T02:43:41.303 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~133s 2026-04-01T02:43:41.303 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~133s 2026-04-01T02:43:41.303 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~126s 2026-04-01T02:43:47.609 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~139s 2026-04-01T02:43:47.609 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~139s 2026-04-01T02:43:47.609 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~132s 2026-04-01T02:43:53.915 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~145s 2026-04-01T02:43:53.915 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~145s 2026-04-01T02:43:53.915 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~139s 2026-04-01T02:44:00.226 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~151s 2026-04-01T02:44:00.226 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~151s 2026-04-01T02:44:00.226 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~145s 2026-04-01T02:44:06.532 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~158s 2026-04-01T02:44:06.532 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~158s 2026-04-01T02:44:06.532 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~151s 2026-04-01T02:44:12.837 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~164s 2026-04-01T02:44:12.837 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~164s 2026-04-01T02:44:12.837 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~158s 2026-04-01T02:44:19.144 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~170s 2026-04-01T02:44:19.144 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~170s 2026-04-01T02:44:19.144 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~164s 2026-04-01T02:44:25.450 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~177s 2026-04-01T02:44:25.450 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~177s 2026-04-01T02:44:25.451 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~170s 2026-04-01T02:44:31.756 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~183s 2026-04-01T02:44:31.757 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~183s 2026-04-01T02:44:31.757 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~177s 2026-04-01T02:44:38.068 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~189s 2026-04-01T02:44:38.069 
INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~189s 2026-04-01T02:44:38.069 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~183s 2026-04-01T02:44:44.376 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~196s 2026-04-01T02:44:44.376 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~196s 2026-04-01T02:44:44.376 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~189s 2026-04-01T02:44:50.682 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~202s 2026-04-01T02:44:50.682 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~202s 2026-04-01T02:44:50.682 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~196s 2026-04-01T02:44:56.989 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~208s 2026-04-01T02:44:56.989 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~208s 2026-04-01T02:44:56.989 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~202s 2026-04-01T02:45:03.296 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~215s 2026-04-01T02:45:03.296 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~215s 2026-04-01T02:45:03.296 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~208s 2026-04-01T02:45:09.603 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~221s 2026-04-01T02:45:09.603 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~221s 2026-04-01T02:45:09.603 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~214s 2026-04-01T02:45:15.915 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~227s 2026-04-01T02:45:15.915 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~227s 2026-04-01T02:45:15.915 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~221s 2026-04-01T02:45:22.221 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~233s 2026-04-01T02:45:22.221 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~233s 2026-04-01T02:45:22.221 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~227s 2026-04-01T02:45:28.527 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~240s 2026-04-01T02:45:28.527 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~240s 2026-04-01T02:45:28.527 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~233s 2026-04-01T02:45:34.835 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~246s 2026-04-01T02:45:34.835 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~246s 2026-04-01T02:45:34.835 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~240s 2026-04-01T02:45:41.146 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~252s 2026-04-01T02:45:41.147 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~252s 2026-04-01T02:45:41.147 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~246s 2026-04-01T02:45:47.458 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~259s 2026-04-01T02:45:47.458 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~259s 2026-04-01T02:45:47.458 
INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~252s 2026-04-01T02:45:53.765 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~265s 2026-04-01T02:45:53.765 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~265s 2026-04-01T02:45:53.765 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~259s 2026-04-01T02:46:00.075 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~271s 2026-04-01T02:46:00.075 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~271s 2026-04-01T02:46:00.075 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~265s 2026-04-01T02:46:06.387 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~278s 2026-04-01T02:46:06.388 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~278s 2026-04-01T02:46:06.388 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~271s 2026-04-01T02:46:12.694 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~284s 2026-04-01T02:46:12.695 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~284s 2026-04-01T02:46:12.695 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~278s 2026-04-01T02:46:19.005 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~290s 2026-04-01T02:46:19.005 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~290s 2026-04-01T02:46:19.005 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~284s 2026-04-01T02:46:25.311 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~297s 2026-04-01T02:46:25.311 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~297s 2026-04-01T02:46:25.311 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~290s 2026-04-01T02:46:31.618 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.a is failed for ~303s 2026-04-01T02:46:31.618 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.c is failed for ~303s 2026-04-01T02:46:31.618 INFO:tasks.daemonwatchdog.daemon_watchdog:daemon ceph.mon.b is failed for ~296s 2026-04-01T02:46:31.618 INFO:tasks.daemonwatchdog.daemon_watchdog:BARK! 
unmounting mounts and killing all daemons 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.0:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.1:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.2:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.3:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.4:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.5:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.6:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.ceph.osd.7:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.rgw.client.0:Sent signal 15 2026-04-01T02:46:32.925 INFO:tasks.rgw.client.1:Sent signal 15 2026-04-01T02:46:32.926 INFO:tasks.rgw.client.2:Sent signal 15 2026-04-01T02:46:32.926 INFO:tasks.ceph.mgr.y:Sent signal 15 2026-04-01T02:46:32.926 INFO:tasks.ceph.mgr.x:Sent signal 15 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:46:32.923+0000 7fd161085640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 0 (PID: 62734) UID: 0 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:46:32.923+0000 7fd161085640 -1 osd.0 71 *** Got signal Terminated *** 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.0.vm03.stderr:2026-04-01T02:46:32.923+0000 7fd161085640 -1 osd.0 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.7.vm06.stderr:2026-04-01T02:46:32.923+0000 7f1629e6e640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 7 (PID: 59673) UID: 0 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.7.vm06.stderr:2026-04-01T02:46:32.923+0000 7f1629e6e640 -1 osd.7 71 *** Got signal Terminated *** 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.7.vm06.stderr:2026-04-01T02:46:32.923+0000 7f1629e6e640 -1 osd.7 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.1.vm03.stderr:2026-04-01T02:46:32.923+0000 7fa34355a640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 1 (PID: 62742) UID: 0 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.1.vm03.stderr:2026-04-01T02:46:32.923+0000 7fa34355a640 -1 osd.1 71 *** Got signal Terminated *** 2026-04-01T02:46:32.926 INFO:tasks.ceph.osd.1.vm03.stderr:2026-04-01T02:46:32.923+0000 7fa34355a640 -1 osd.1 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.3.vm03.stderr:2026-04-01T02:46:32.924+0000 7f360a8ff640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 3 (PID: 62749) UID: 0 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.3.vm03.stderr:2026-04-01T02:46:32.924+0000 7f360a8ff640 -1 osd.3 71 *** Got signal Terminated *** 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.3.vm03.stderr:2026-04-01T02:46:32.924+0000 7f360a8ff640 -1 osd.3 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.5.vm06.stderr:2026-04-01T02:46:32.924+0000 7f5159235640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 5 (PID: 59666) UID: 0 2026-04-01T02:46:32.927 INFO:tasks.rgw.client.0.vm03.stdout:2026-04-01T02:46:32.924+0000 7fa9309ec640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper term radosgw --rgw-frontends beast port=80 -n client.0 --cluster ceph -k /etc/ceph/ceph.client.0.keyring --log-file /var/log/ceph/rgw.ceph.client.0.log 
--rgw_ops_log_socket_path /home/ubuntu/cephtest/rgw.opslog.ceph.client.0.sock --foreground (PID: 68615) UID: 0 2026-04-01T02:46:32.927 INFO:tasks.rgw.client.1.vm06.stdout:2026-04-01T02:46:32.924+0000 7f51a7605640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper term radosgw --rgw-frontends beast port=80 -n client.1 --cluster ceph -k /etc/ceph/ceph.client.1.keyring --log-file /var/log/ceph/rgw.ceph.client.1.log --rgw_ops_log_socket_path /home/ubuntu/cephtest/rgw.opslog.ceph.client.1.sock --foreground (PID: 63914) UID: 0 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.5.vm06.stderr:2026-04-01T02:46:32.924+0000 7f5159235640 -1 osd.5 71 *** Got signal Terminated *** 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.5.vm06.stderr:2026-04-01T02:46:32.924+0000 7f5159235640 -1 osd.5 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.4.vm06.stderr:2026-04-01T02:46:32.924+0000 7f8997839640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 4 (PID: 59665) UID: 0 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.4.vm06.stderr:2026-04-01T02:46:32.924+0000 7f8997839640 -1 osd.4 71 *** Got signal Terminated *** 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.4.vm06.stderr:2026-04-01T02:46:32.924+0000 7f8997839640 -1 osd.4 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:32.927 INFO:tasks.rgw.client.0.vm03.stdout:2026-04-01T02:46:32.924+0000 7fa93424d980 -1 shutting down 2026-04-01T02:46:32.927 INFO:tasks.rgw.client.2.vm08.stdout:2026-04-01T02:46:32.924+0000 7faa06ed0640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper term radosgw --rgw-frontends beast port=80 -n client.2 --cluster ceph -k /etc/ceph/ceph.client.2.keyring --log-file /var/log/ceph/rgw.ceph.client.2.log --rgw_ops_log_socket_path /home/ubuntu/cephtest/rgw.opslog.ceph.client.2.sock --foreground (PID: 52145) UID: 0 2026-04-01T02:46:32.927 INFO:tasks.rgw.client.2.vm08.stdout:2026-04-01T02:46:32.925+0000 7faa0a731980 -1 shutting down 2026-04-01T02:46:32.927 INFO:tasks.ceph.osd.2.vm03.stderr:2026-04-01T02:46:32.923+0000 7f6c8f171640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 2 (PID: 62746) UID: 0 2026-04-01T02:46:32.928 INFO:tasks.ceph.osd.2.vm03.stderr:2026-04-01T02:46:32.923+0000 7f6c8f171640 -1 osd.2 71 *** Got signal Terminated *** 2026-04-01T02:46:32.928 INFO:tasks.ceph.osd.2.vm03.stderr:2026-04-01T02:46:32.923+0000 7f6c8f171640 -1 osd.2 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:32.930 INFO:tasks.rgw.client.1.vm06.stdout:2026-04-01T02:46:32.924+0000 7f51aae66980 -1 shutting down 2026-04-01T02:46:32.930 INFO:tasks.ceph.osd.6.vm06.stderr:2026-04-01T02:46:32.928+0000 7f6af6306640 -1 received signal: Terminated from /usr/bin/python3 /bin/daemon-helper kill ceph-osd -f --cluster ceph -i 6 (PID: 59671) UID: 0 2026-04-01T02:46:32.930 INFO:tasks.ceph.osd.6.vm06.stderr:2026-04-01T02:46:32.928+0000 7f6af6306640 -1 osd.6 71 *** Got signal Terminated *** 2026-04-01T02:46:32.930 INFO:tasks.ceph.osd.6.vm06.stderr:2026-04-01T02:46:32.928+0000 7f6af6306640 -1 osd.6 71 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-04-01T02:46:33.126 INFO:tasks.ceph.mgr.y.vm03.stderr:daemon-helper: command crashed with signal 15 2026-04-01T02:47:06.476 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_large_scale 2026-04-01T02:47:06.476 
INFO:teuthology.orchestra.run.vm03.stdout:-------------------------------- live log call --------------------------------- 2026-04-01T02:47:06.476 INFO:teuthology.orchestra.run.vm03.stdout:WARNING dedup.test_dedup:test_dedup.py:2748 test_dedup_dry_large_scale: failed!! 2026-04-01T02:47:19.291 INFO:teuthology.orchestra.run.vm03.stdout:FAILED [ 97%] 2026-04-01T02:47:19.296 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_cleanup PASSED [100%] 2026-04-01T02:47:19.296 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.296 INFO:teuthology.orchestra.run.vm03.stdout:=================================== FAILURES =================================== 2026-04-01T02:47:19.296 INFO:teuthology.orchestra.run.vm03.stdout:__________________________ test_dedup_dry_large_scale __________________________ 2026-04-01T02:47:19.296 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.296 INFO:teuthology.orchestra.run.vm03.stdout:self = 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: def _new_conn(self): 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: """Establish a socket connection and set nodelay settings on it. 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: :return: New socket connection. 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: """ 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: extra_kw = {} 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: if self.source_address: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: extra_kw["source_address"] = self.source_address 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: if self.socket_options: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: extra_kw["socket_options"] = self.socket_options 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: try: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout:> conn = connection.create_connection( 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: (self._dns_host, self.port), self.timeout, **extra_kw 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/connection.py:174: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout:_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/util/connection.py:95: in create_connection 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: raise err 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout:_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout:address = ('vm03.local', 80), timeout = 60, source_address = None 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout:socket_options = [(6, 1, 1)] 2026-04-01T02:47:19.297 
INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: def create_connection( 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: address, 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: timeout=socket._GLOBAL_DEFAULT_TIMEOUT, 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: source_address=None, 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: socket_options=None, 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: ): 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: """Connect to *address* and return the socket object. 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: Convenience function. Connect to *address* (a 2-tuple ``(host, 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: port)``) and return the socket object. Passing the optional 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: *timeout* parameter will set the timeout on the socket instance 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: before attempting to connect. If no *timeout* is supplied, the 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: global default timeout setting returned by :func:`socket.getdefaulttimeout` 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: is used. If *source_address* is set it must be a tuple of (host, port) 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: for the socket to bind as a source address before making the connection. 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: An host of '' or port 0 tells the OS to use the default. 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: """ 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: host, port = address 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: if host.startswith("["): 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: host = host.strip("[]") 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: err = None 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: # Using the value from allowed_gai_family() in the context of getaddrinfo lets 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: # us select whether to work with IPv4 DNS records, IPv6 records, or both. 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: # The original create_connection function always returns all records. 
2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: family = allowed_gai_family() 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: try: 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: host.encode("idna") 2026-04-01T02:47:19.297 INFO:teuthology.orchestra.run.vm03.stdout: except UnicodeError: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: return six.raise_from( 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: LocationParseError(u"'%s', label empty or too long" % host), None 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: af, socktype, proto, canonname, sa = res 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: sock = None 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: try: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: sock = socket.socket(af, socktype, proto) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: # If provided, set socket level options before connecting. 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: _set_socket_options(sock, socket_options) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: sock.settimeout(timeout) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: if source_address: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: sock.bind(source_address) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout:> sock.connect(sa) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout:E ConnectionRefusedError: [Errno 111] Connection refused 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/util/connection.py:85: ConnectionRefusedError 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout:During handling of the above exception, another exception occurred: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout:self = 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout:request = 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: def send(self, request): 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: try: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: proxy_url = self._proxy_config.proxy_url_for(request.url) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: manager = self._get_connection_manager(request.url, proxy_url) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: conn = manager.connection_from_url(request.url) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 
self._setup_ssl_cert(conn, request.url, self._verify) 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: if ensure_boolean( 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER', '') 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: ): 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: # This is currently an "experimental" feature which provides 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: # no guarantees of backwards compatibility. It may be subject 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: # to change or removal in any patch version. Anyone opting in 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: # to this feature should strictly pin botocore. 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: host = urlparse(request.url).hostname 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: conn.proxy_headers['host'] = host 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.298 INFO:teuthology.orchestra.run.vm03.stdout: request_target = self._get_request_target(request.url, proxy_url) 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:> urllib_response = conn.urlopen( 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: method=request.method, 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: url=request_target, 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: body=request.body, 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: headers=request.headers, 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: retries=Retry(False), 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: assert_same_host=False, 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: preload_content=False, 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: decode_content=False, 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: chunked=self._chunked(request.headers), 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/httpsession.py:477: 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/connectionpool.py:802: in urlopen 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: retries = retries.increment( 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/util/retry.py:527: in increment 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: raise six.reraise(type(error), error, _stacktrace) 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/packages/six.py:770: in reraise 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: raise value 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/connectionpool.py:716: in urlopen 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: httplib_response = self._make_request( 
2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/connectionpool.py:416: in _make_request 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout: conn.request(method, url, **httplib_request_kw) 2026-04-01T02:47:19.299 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/awsrequest.py:96: in request 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: rval = super().request(method, url, body, headers, *args, **kwargs) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/connection.py:244: in request 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: super(HTTPConnection, self).request(method, url, body=body, headers=headers) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:/usr/lib64/python3.9/http/client.py:1285: in request 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: self._send_request(method, url, body, headers, encode_chunked) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:/usr/lib64/python3.9/http/client.py:1331: in _send_request 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: self.endheaders(body, encode_chunked=encode_chunked) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:/usr/lib64/python3.9/http/client.py:1280: in endheaders 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: self._send_output(message_body, encode_chunked=encode_chunked) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/awsrequest.py:123: in _send_output 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: self.send(msg) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/awsrequest.py:223: in send 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: return super().send(str) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:/usr/lib64/python3.9/http/client.py:980: in send 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: self.connect() 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/connection.py:205: in connect 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: conn = self._new_conn() 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:self = 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: def _new_conn(self): 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: """Establish a socket connection and set nodelay settings on it. 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: :return: New socket connection. 
2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: """ 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: extra_kw = {} 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: if self.source_address: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: extra_kw["source_address"] = self.source_address 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: if self.socket_options: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: extra_kw["socket_options"] = self.socket_options 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: try: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: conn = connection.create_connection( 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: (self._dns_host, self.port), self.timeout, **extra_kw 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: except SocketTimeout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: raise ConnectTimeoutError( 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: self, 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: "Connection to %s timed out. (connect timeout=%s)" 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: % (self.host, self.timeout), 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: except SocketError as e: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:> raise NewConnectionError( 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: self, "Failed to establish a new connection: %s" % e 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:E urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/urllib3/connection.py:186: NewConnectionError 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout:During handling of the above exception, another exception occurred: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: @pytest.mark.basic_test 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: def test_dedup_dry_large_scale(): 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: #return 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.300 INFO:teuthology.orchestra.run.vm03.stdout: prepare_test() 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: max_copies_count=3 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: num_threads=64 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: num_files=32*1024 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: size=1*KB 2026-04-01T02:47:19.301 
INFO:teuthology.orchestra.run.vm03.stdout: files=[] 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: config=TransferConfig(multipart_threshold=size, multipart_chunksize=1*MB) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: log.debug("test_dedup_dry_large_scale_new: connect to AWS ...") 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: gen_files_fixed_size(files, num_files, size, max_copies_count) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: conns=get_connections(num_threads) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: bucket_names=get_buckets(num_threads) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: for i in range(num_threads): 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: conns[i].create_bucket(Bucket=bucket_names[i]) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: try: 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: threads_simple_dedup_with_tenants(files, conns, bucket_names, config, True) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: except: 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: log.warning("test_dedup_dry_large_scale: failed!!") 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: finally: 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: # cleanup must be executed even after a failure 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:> cleanup_all_buckets(bucket_names, conns) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py:2751: 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py:496: in cleanup_all_buckets 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: delete_bucket_with_all_objects(bucket_name, conn) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py:452: in delete_bucket_with_all_objects 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: listing=conn.list_objects(Bucket=bucket_name, Marker=marker, MaxKeys=max_keys) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/client.py:602: in _api_call 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: return self._make_api_call(operation_name, kwargs) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/context.py:123: in wrapper 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: return func(*args, **kwargs) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/client.py:1060: in _make_api_call 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: http, parsed_response = self._make_request( 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/client.py:1084: in _make_request 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: return self._endpoint.make_request(operation_model, request_dict) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/endpoint.py:119: in make_request 
2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: return self._send_request(request_dict, operation_model) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/endpoint.py:200: in _send_request 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: while self._needs_retry( 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/endpoint.py:360: in _needs_retry 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: responses = self._event_emitter.emit( 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/hooks.py:412: in emit 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: return self._emitter.emit(aliased_event_name, **kwargs) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/hooks.py:256: in emit 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: return self._emit(event_name, kwargs) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/hooks.py:239: in _emit 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: response = handler(**kwargs) 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/retryhandler.py:207: in __call__ 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout: if self._checker(**checker_kwargs): 2026-04-01T02:47:19.301 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/retryhandler.py:284: in __call__ 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: should_retry = self._should_retry( 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/retryhandler.py:320: in _should_retry 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: return self._checker(attempt_number, response, caught_exception) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/retryhandler.py:363: in __call__ 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: checker_response = checker( 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/retryhandler.py:247: in __call__ 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: return self._check_caught_exception( 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/retryhandler.py:416: in _check_caught_exception 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: raise caught_exception 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/endpoint.py:279: in _do_get_response 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: http_response = self._send(request) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/endpoint.py:383: in _send 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: return self.http_session.send(request) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: 
2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:self = 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout:request = 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: def send(self, request): 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: try: 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: proxy_url = self._proxy_config.proxy_url_for(request.url) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: manager = self._get_connection_manager(request.url, proxy_url) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: conn = manager.connection_from_url(request.url) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: self._setup_ssl_cert(conn, request.url, self._verify) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: if ensure_boolean( 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER', '') 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: ): 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: # This is currently an "experimental" feature which provides 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: # no guarantees of backwards compatibility. It may be subject 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: # to change or removal in any patch version. Anyone opting in 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: # to this feature should strictly pin botocore. 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: host = urlparse(request.url).hostname 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: conn.proxy_headers['host'] = host 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: request_target = self._get_request_target(request.url, proxy_url) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: urllib_response = conn.urlopen( 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: method=request.method, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: url=request_target, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: body=request.body, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: headers=request.headers, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: retries=Retry(False), 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: assert_same_host=False, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: preload_content=False, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: decode_content=False, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: chunked=self._chunked(request.headers), 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: http_response = botocore.awsrequest.AWSResponse( 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: request.url, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: urllib_response.status, 2026-04-01T02:47:19.302 INFO:teuthology.orchestra.run.vm03.stdout: urllib_response.headers, 2026-04-01T02:47:19.302 
INFO:teuthology.orchestra.run.vm03.stdout: urllib_response, 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: ) 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: if not request.stream_output: 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: # Cause the raw stream to be exhausted immediately. We do it 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: # this way instead of using preload_content because 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: # preload_content will never buffer chunked responses 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: http_response.content 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: return http_response 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: except URLLib3SSLError as e: 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: raise SSLError(endpoint_url=request.url, error=e) 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: except (NewConnectionError, socket.gaierror) as e: 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout:> raise EndpointConnectionError(endpoint_url=request.url, error=e) 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout:E botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL: "http://vm03.local:80/eyishakhqegtgjvj-86?marker=&max-keys=1000&encoding-type=url" 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout:.tox/py/lib/python3.9/site-packages/botocore/httpsession.py:506: EndpointConnectionError 2026-04-01T02:47:19.303 INFO:teuthology.orchestra.run.vm03.stdout:----------------------------- Captured stderr call ----------------------------- 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout:ignoring --setuser ceph since I am not root 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout:ignoring --setgroup ceph since I am not root 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout:ignoring --setuser ceph since I am not root 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout:ignoring --setgroup ceph since I am not root 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout:Process Process-80: 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout:Traceback (most recent call last): 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/connection.py", line 174, in _new_conn 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout: conn = connection.create_connection( 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/util/connection.py", line 95, in create_connection 2026-04-01T02:47:19.625 INFO:teuthology.orchestra.run.vm03.stdout: raise err 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/util/connection.py", line 85, in create_connection 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: sock.connect(sa) 2026-04-01T02:47:19.626 
INFO:teuthology.orchestra.run.vm03.stdout:ConnectionRefusedError: [Errno 111] Connection refused 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout:During handling of the above exception, another exception occurred: 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout:Traceback (most recent call last): 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/botocore/httpsession.py", line 477, in send 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: urllib_response = conn.urlopen( 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/connectionpool.py", line 802, in urlopen 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: retries = retries.increment( 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/util/retry.py", line 527, in increment 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: raise six.reraise(type(error), error, _stacktrace) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/packages/six.py", line 770, in reraise 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: raise value 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/connectionpool.py", line 716, in urlopen 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: httplib_response = self._make_request( 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/connectionpool.py", line 416, in _make_request 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: conn.request(method, url, **httplib_request_kw) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/botocore/awsrequest.py", line 96, in request 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: rval = super().request(method, url, body, headers, *args, **kwargs) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/connection.py", line 244, in request 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: super(HTTPConnection, self).request(method, url, body=body, headers=headers) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/usr/lib64/python3.9/http/client.py", line 1285, in request 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: self._send_request(method, url, body, headers, encode_chunked) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/usr/lib64/python3.9/http/client.py", line 1331, in _send_request 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: self.endheaders(body, encode_chunked=encode_chunked) 2026-04-01T02:47:19.626 
INFO:teuthology.orchestra.run.vm03.stdout: File "/usr/lib64/python3.9/http/client.py", line 1280, in endheaders 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: self._send_output(message_body, encode_chunked=encode_chunked) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/botocore/awsrequest.py", line 123, in _send_output 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: self.send(msg) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/botocore/awsrequest.py", line 223, in send 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: return super().send(str) 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/usr/lib64/python3.9/http/client.py", line 980, in send 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: self.connect() 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/connection.py", line 205, in connect 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: conn = self._new_conn() 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/urllib3/connection.py", line 186, in _new_conn 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: raise NewConnectionError( 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout:urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout:During handling of the above exception, another exception occurred: 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout:Traceback (most recent call last): 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/usr/lib64/python3.9/multiprocessing/process.py", line 315, in _bootstrap 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: self.run() 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: File "/usr/lib64/python3.9/multiprocessing/process.py", line 108, in run 2026-04-01T02:47:19.626 INFO:teuthology.orchestra.run.vm03.stdout: self._target(*self._args, **self._kwargs) 2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout: File "/home/ubuntu/cephtest/ceph/src/test/rgw/dedup/test_dedup.py 2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:------------------------------ Captured log call ------------------------------- 2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:WARNING dedup.test_dedup:test_dedup.py:2748 test_dedup_dry_large_scale: failed!! 
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:=============================== warnings summary ===============================
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small_with_tenants
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_multipart
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small_large_mix
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_basic_with_tenants
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_multipart_with_tenants
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_small_multipart_with_tenants
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_large_scale_with_tenants
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:test_dedup.py::test_dedup_dry_large_scale
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout: /home/ubuntu/cephtest/ceph/src/test/rgw/dedup/.tox/py/lib/python3.9/site-packages/boto3/compat.py:89: PythonDeprecationWarning: Boto3 will no longer support Python 3.9 starting April 29, 2026. To continue receiving service updates, bug fixes, and security updates please upgrade to Python 3.10 or later. More information can be found here: https://aws.amazon.com/blogs/developer/python-support-policy-updates-for-aws-sdks-and-tools/
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout: warnings.warn(warning, PythonDeprecationWarning)
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:=========================== short test summary info ============================
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:FAILED test_dedup.py::test_dedup_dry_large_scale - botocore.exceptions.Endpoi...
2026-04-01T02:47:19.627 INFO:teuthology.orchestra.run.vm03.stdout:============ 1 failed, 33 passed, 8 warnings in 1297.95s (0:21:37) =============
2026-04-01T02:47:19.796 INFO:teuthology.orchestra.run.vm03.stdout:py: exit 1 (1298.55 seconds) /home/ubuntu/cephtest/ceph/src/test/rgw/dedup> pytest -v -m 'basic_test or request_test or example_test' pid=69582
2026-04-01T02:47:19.796 INFO:teuthology.orchestra.run.vm03.stdout: py: FAIL code 1 (1301.58=setup[3.03]+cmd[1298.55] seconds)
2026-04-01T02:47:19.796 INFO:teuthology.orchestra.run.vm03.stdout: evaluation failed :( (1301.59 seconds)
2026-04-01T02:47:19.818 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-04-01T02:47:19.818 ERROR:teuthology.contextutil:Saw exception from nested tasks
Traceback (most recent call last):
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 30, in nested
    vars.append(enter())
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 135, in __enter__
    return next(self.gen)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 191, in run_tests
    toxvenv_sh(ctx, remote, args, label="dedup tests against rgw")
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 165, in toxvenv_sh
    return remote.sh(['source', activate, run.Raw('&&')] + args, **kwargs)
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 97, in sh
    proc = self.run(**kwargs)
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run
    r = self._runner(client=self.ssh, name=self.shortname, **kwargs)
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run
    r.wait()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait
    self._raise_for_status()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status
    raise CommandFailedError(
teuthology.exceptions.CommandFailedError: Command failed (dedup tests against rgw) on vm03 with status 1: "source /home/ubuntu/cephtest/tox-venv/bin/activate && cd /home/ubuntu/cephtest/ceph/src/test/rgw/dedup/ && DEDUPTESTS_CONF=./deduptests.client.0.conf tox -- -v -m 'basic_test or request_test or example_test'"
2026-04-01T02:47:19.819 INFO:tasks.dedup_tests:Removing dedup-tests.conf file...
2026-04-01T02:47:19.819 DEBUG:teuthology.orchestra.run.vm03:> rm -f /home/ubuntu/cephtest/ceph/src/test/rgw/dedup/deduptests.client.0.conf
2026-04-01T02:47:19.838 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin -n client.0 user rm --uid foo.client.0 --purge-data --cluster ceph
2026-04-01T02:47:19.912 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root
2026-04-01T02:47:19.912 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root
2026-04-01T02:52:19.914 INFO:teuthology.orchestra.run.vm03.stderr:failed to fetch mon config (--no-mon-config to skip)
2026-04-01T02:52:19.916 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-04-01T02:52:19.916 INFO:tasks.dedup_tests:Removing dedup-tests...
2026-04-01T02:52:19.916 DEBUG:teuthology.orchestra.run.vm03:> rm -rf /home/ubuntu/cephtest/ceph
2026-04-01T02:52:20.426 ERROR:teuthology.run_tasks:Saw exception from tasks.
Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 107, in create_users yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 30, in nested vars.append(enter()) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 135, in __enter__ return next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 191, in run_tests toxvenv_sh(ctx, remote, args, label="dedup tests against rgw") File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 165, in toxvenv_sh return remote.sh(['source', activate, run.Raw('&&')] + args, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 97, in sh proc = self.run(**kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed (dedup tests against rgw) on vm03 with status 1: "source /home/ubuntu/cephtest/tox-venv/bin/activate && cd /home/ubuntu/cephtest/ceph/src/test/rgw/dedup/ && DEDUPTESTS_CONF=./deduptests.client.0.conf tox -- -v -m 'basic_test or request_test or example_test'" During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 112, in run_tasks manager.__enter__() File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 135, in __enter__ return next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 240, in task with contextutil.nested( File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 135, in __enter__ return next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 45, in download yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/dedup_tests.py", line 114, in create_users ctx.cluster.only(client).run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/cluster.py", line 85, in run procs = [remote.run(**kwargs, wait=_wait) for remote in remotes] File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/cluster.py", line 85, in procs = [remote.run(**kwargs, wait=_wait) for remote in remotes] File 
"/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin -n client.0 user rm --uid foo.client.0 --purge-data --cluster ceph' 2026-04-01T02:52:20.427 DEBUG:teuthology.run_tasks:Unwinding manager dedup-tests 2026-04-01T02:52:20.429 DEBUG:teuthology.run_tasks:Unwinding manager tox 2026-04-01T02:52:20.431 DEBUG:teuthology.orchestra.run.vm03:> rm -rf /home/ubuntu/cephtest/tox-venv 2026-04-01T02:52:20.495 DEBUG:teuthology.run_tasks:Unwinding manager tox 2026-04-01T02:52:20.497 DEBUG:teuthology.orchestra.run.vm03:> rm -rf /home/ubuntu/cephtest/tox-venv 2026-04-01T02:52:20.511 DEBUG:teuthology.run_tasks:Unwinding manager rgw 2026-04-01T02:52:20.513 DEBUG:tasks.rgw.client.0:waiting for process to exit 2026-04-01T02:52:20.513 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:52:20.513 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:52:20.513 ERROR:teuthology.orchestra.daemon.state:Error while waiting for process to exit Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/daemon/state.py", line 146, in stop run.wait([self.proc], timeout=timeout) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait proc.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: "sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper term radosgw --rgw-frontends 'beast port=80' -n client.0 --cluster ceph -k /etc/ceph/ceph.client.0.keyring --log-file /var/log/ceph/rgw.ceph.client.0.log --rgw_ops_log_socket_path /home/ubuntu/cephtest/rgw.opslog.ceph.client.0.sock --foreground | sudo tee /var/log/ceph/rgw.ceph.client.0.stdout 2>&1" 2026-04-01T02:52:20.513 INFO:tasks.rgw.client.0:Stopped 2026-04-01T02:52:20.513 DEBUG:teuthology.orchestra.run.vm03:> rm -f /home/ubuntu/cephtest/rgw.opslog.ceph.client.0.sock 2026-04-01T02:52:20.565 DEBUG:teuthology.orchestra.run.vm03:> sudo rm -f /etc/ceph/vault-root-token 2026-04-01T02:52:20.633 DEBUG:teuthology.orchestra.run.vm03:> sudo rm -f /home/ubuntu/cephtest/url_file 2026-04-01T02:52:20.694 INFO:tasks.util.rgw:rgwadmin: client.0 : ['gc', 'process', '--include-all'] 2026-04-01T02:52:20.694 DEBUG:tasks.util.rgw:rgwadmin: cmd=['adjust-ulimits', 'ceph-coverage', '/home/ubuntu/cephtest/archive/coverage', 'radosgw-admin', '--log-to-stderr', '--format', 'json', '-n', 'client.0', '--cluster', 'ceph', 'gc', 'process', '--include-all'] 2026-04-01T02:52:20.694 DEBUG:teuthology.orchestra.run.vm03:> adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all 2026-04-01T02:52:20.768 
INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setuser ceph since I am not root 2026-04-01T02:52:20.768 INFO:teuthology.orchestra.run.vm03.stderr:ignoring --setgroup ceph since I am not root 2026-04-01T02:57:20.769 INFO:teuthology.orchestra.run.vm03.stderr:2026-04-01T02:57:20.767+0000 7fd8f0f3a900 0 monclient(hunting): authenticate timed out after 300 2026-04-01T02:57:20.769 INFO:teuthology.orchestra.run.vm03.stderr:failed to fetch mon config (--no-mon-config to skip) 2026-04-01T02:57:20.771 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:57:20.771 ERROR:teuthology.run_tasks:Manager failed: rgw Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' 2026-04-01T02:57:20.771 DEBUG:teuthology.run_tasks:Unwinding manager openssl_keys 2026-04-01T02:57:20.774 DEBUG:teuthology.run_tasks:Unwinding manager ceph 2026-04-01T02:57:20.776 INFO:tasks.ceph.ceph_manager.ceph:waiting for clean 2026-04-01T02:57:20.776 DEBUG:teuthology.orchestra.run.vm03:> sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json 2026-04-01T02:59:20.841 DEBUG:teuthology.orchestra.run:got remote process result: 124 2026-04-01T02:59:20.841 ERROR:teuthology.contextutil:Saw exception from nested tasks Traceback 
(most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2001, in task yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested yield vars File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2011, in task ctx.managers[config['cluster']].wait_for_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2919, in wait_for_clean num_active_clean = self.get_num_active_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2698, in get_num_active_clean pgs = self.get_pg_stats() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2464, in get_pg_stats out = self.raw_cluster_cmd('pg', 'dump', '--format=json') File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1696, in raw_cluster_cmd return self.run_cluster_cmd(**kwargs).stdout.getvalue() File 
"/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1687, in run_cluster_cmd return self.controller.run(**kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 124: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json' 2026-04-01T02:59:20.842 INFO:teuthology.misc:Shutting down mds daemons... 2026-04-01T02:59:20.842 INFO:teuthology.misc:Shutting down osd daemons... 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.0:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.0:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.1:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.1:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.2:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.2:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.3:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.3:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.4:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.4:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.5:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.5:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.6:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.6:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.osd.7:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 INFO:tasks.ceph.osd.7:Stopped 2026-04-01T02:59:20.842 INFO:teuthology.misc:Shutting down mgr daemons... 
2026-04-01T02:59:20.842 DEBUG:tasks.ceph.mgr.y:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.842 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:59:20.842 ERROR:teuthology.orchestra.daemon.state:Error while waiting for process to exit Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2001, in task yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 1526, in run_daemon yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested yield vars File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2011, in task ctx.managers[config['cluster']].wait_for_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2919, in wait_for_clean num_active_clean = self.get_num_active_clean() File 
"/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2698, in get_num_active_clean pgs = self.get_pg_stats() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2464, in get_pg_stats out = self.raw_cluster_cmd('pg', 'dump', '--format=json') File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1696, in raw_cluster_cmd return self.run_cluster_cmd(**kwargs).stdout.getvalue() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1687, in run_cluster_cmd return self.controller.run(**kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 124: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/daemon/state.py", line 146, in stop run.wait([self.proc], timeout=timeout) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait proc.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mgr -f --cluster ceph -i y' 2026-04-01T02:59:20.842 INFO:tasks.ceph.mgr.y:Stopped 2026-04-01T02:59:20.842 DEBUG:tasks.ceph.mgr.x:waiting for process to exit 2026-04-01T02:59:20.842 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.843 INFO:tasks.ceph.mgr.x:Stopped 2026-04-01T02:59:20.843 INFO:teuthology.misc:Shutting down mon daemons... 
2026-04-01T02:59:20.843 DEBUG:tasks.ceph.mon.a:waiting for process to exit 2026-04-01T02:59:20.843 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.843 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:59:20.843 ERROR:teuthology.orchestra.daemon.state:Error while waiting for process to exit Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2001, in task yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 1526, in run_daemon yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested yield vars File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2011, in task ctx.managers[config['cluster']].wait_for_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2919, in wait_for_clean num_active_clean = self.get_num_active_clean() File 
"/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2698, in get_num_active_clean pgs = self.get_pg_stats() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2464, in get_pg_stats out = self.raw_cluster_cmd('pg', 'dump', '--format=json') File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1696, in raw_cluster_cmd return self.run_cluster_cmd(**kwargs).stdout.getvalue() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1687, in run_cluster_cmd return self.controller.run(**kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 124: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/daemon/state.py", line 146, in stop run.wait([self.proc], timeout=timeout) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait proc.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mon -f --cluster ceph -i a' 2026-04-01T02:59:20.843 INFO:tasks.ceph.mon.a:Stopped 2026-04-01T02:59:20.843 DEBUG:tasks.ceph.mon.c:waiting for process to exit 2026-04-01T02:59:20.843 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.843 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:59:20.843 ERROR:teuthology.orchestra.daemon.state:Error while waiting for process to exit Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2001, in task yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, 
value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 1526, in run_daemon yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested yield vars File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2011, in task ctx.managers[config['cluster']].wait_for_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2919, in wait_for_clean num_active_clean = self.get_num_active_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2698, in get_num_active_clean pgs = self.get_pg_stats() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2464, in get_pg_stats out = self.raw_cluster_cmd('pg', 'dump', '--format=json') File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1696, in raw_cluster_cmd return self.run_cluster_cmd(**kwargs).stdout.getvalue() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1687, in run_cluster_cmd return self.controller.run(**kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 124: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump 
--format=json' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/daemon/state.py", line 146, in stop run.wait([self.proc], timeout=timeout) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait proc.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mon -f --cluster ceph -i c' 2026-04-01T02:59:20.843 INFO:tasks.ceph.mon.c:Stopped 2026-04-01T02:59:20.843 DEBUG:tasks.ceph.mon.b:waiting for process to exit 2026-04-01T02:59:20.843 INFO:teuthology.orchestra.run:waiting for 300 2026-04-01T02:59:20.843 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T02:59:20.843 ERROR:teuthology.orchestra.daemon.state:Error while waiting for process to exit Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2001, in task yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr 
--format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 1526, in run_daemon yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested yield vars File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2011, in task ctx.managers[config['cluster']].wait_for_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2919, in wait_for_clean num_active_clean = self.get_num_active_clean() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2698, in get_num_active_clean pgs = self.get_pg_stats() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2464, in get_pg_stats out = self.raw_cluster_cmd('pg', 'dump', '--format=json') File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1696, in raw_cluster_cmd return self.run_cluster_cmd(**kwargs).stdout.getvalue() File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1687, in run_cluster_cmd return self.controller.run(**kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 124: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/daemon/state.py", line 146, in stop run.wait([self.proc], timeout=timeout) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait proc.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm06 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-mon -f --cluster ceph -i b' 2026-04-01T02:59:20.843 INFO:tasks.ceph.mon.b:Stopped 2026-04-01T02:59:20.843 INFO:tasks.ceph:Checking cluster log for badness... 
2026-04-01T02:59:20.843 DEBUG:teuthology.orchestra.run.vm03:> sudo egrep '\[ERR\]|\[WRN\]|\[SEC\]' /var/log/ceph/ceph.log | egrep -v '\(MDS_ALL_DOWN\)' | egrep -v '\(MDS_UP_LESS_THAN_MAX\)' | egrep -v '\(PG_AVAILABILITY\)' | egrep -v '\(PG_DEGRADED\)' | egrep -v '\(POOL_APP_NOT_ENABLED\)' | egrep -v 'not have an application enabled' | head -n 1
2026-04-01T02:59:20.872 INFO:teuthology.orchestra.run.vm03.stdout:2026-04-01T02:36:24.292779+0000 mon.a (mon.0) 676 : cluster [ERR] Health check failed: mon a is very low on available space (MON_DISK_CRIT)
2026-04-01T02:59:20.872 WARNING:tasks.ceph:Found errors (ERR|WRN|SEC) in cluster log
2026-04-01T02:59:20.872 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-0 on ubuntu@vm03.local
2026-04-01T02:59:20.872 DEBUG:teuthology.orchestra.run.vm03:> sync && sudo umount -f /var/lib/ceph/osd/ceph-0
2026-04-01T02:59:20.987 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-1 on ubuntu@vm03.local
2026-04-01T02:59:20.987 DEBUG:teuthology.orchestra.run.vm03:> sync && sudo umount -f /var/lib/ceph/osd/ceph-1
2026-04-01T02:59:21.060 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-2 on ubuntu@vm03.local
2026-04-01T02:59:21.061 DEBUG:teuthology.orchestra.run.vm03:> sync && sudo umount -f /var/lib/ceph/osd/ceph-2
2026-04-01T02:59:21.131 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-3 on ubuntu@vm03.local
2026-04-01T02:59:21.131 DEBUG:teuthology.orchestra.run.vm03:> sync && sudo umount -f /var/lib/ceph/osd/ceph-3
2026-04-01T02:59:21.208 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-4 on ubuntu@vm06.local
2026-04-01T02:59:21.208 DEBUG:teuthology.orchestra.run.vm06:> sync && sudo umount -f /var/lib/ceph/osd/ceph-4
2026-04-01T02:59:21.326 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-5 on ubuntu@vm06.local
2026-04-01T02:59:21.327 DEBUG:teuthology.orchestra.run.vm06:> sync && sudo umount -f /var/lib/ceph/osd/ceph-5
2026-04-01T02:59:21.423 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-6 on ubuntu@vm06.local
2026-04-01T02:59:21.423 DEBUG:teuthology.orchestra.run.vm06:> sync && sudo umount -f /var/lib/ceph/osd/ceph-6
2026-04-01T02:59:21.528 INFO:tasks.ceph:Unmounting /var/lib/ceph/osd/ceph-7 on ubuntu@vm06.local
2026-04-01T02:59:21.528 DEBUG:teuthology.orchestra.run.vm06:> sync && sudo umount -f /var/lib/ceph/osd/ceph-7
2026-04-01T02:59:21.627 INFO:tasks.ceph:Archiving mon data...
2026-04-01T02:59:21.627 DEBUG:teuthology.misc:Transferring archived files from vm03:/var/lib/ceph/mon/ceph-a to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/data/mon.a.tgz
2026-04-01T02:59:21.627 DEBUG:teuthology.orchestra.run.vm03:> mktemp
2026-04-01T02:59:21.642 INFO:teuthology.orchestra.run.vm03.stdout:/tmp/tmp.omh1XT3GN8
2026-04-01T02:59:21.642 DEBUG:teuthology.orchestra.run.vm03:> sudo tar cz -f - -C /var/lib/ceph/mon/ceph-a -- . > /tmp/tmp.omh1XT3GN8
2026-04-01T02:59:21.779 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod 0666 /tmp/tmp.omh1XT3GN8
2026-04-01T02:59:21.856 DEBUG:teuthology.orchestra.remote:vm03:/tmp/tmp.omh1XT3GN8 is 518KB
2026-04-01T02:59:21.914 DEBUG:teuthology.orchestra.run.vm03:> rm -fr /tmp/tmp.omh1XT3GN8
2026-04-01T02:59:21.927 DEBUG:teuthology.misc:Transferring archived files from vm03:/var/lib/ceph/mon/ceph-c to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/data/mon.c.tgz
2026-04-01T02:59:21.927 DEBUG:teuthology.orchestra.run.vm03:> mktemp
2026-04-01T02:59:21.981 INFO:teuthology.orchestra.run.vm03.stdout:/tmp/tmp.anSO4ROK7e
2026-04-01T02:59:21.982 DEBUG:teuthology.orchestra.run.vm03:> sudo tar cz -f - -C /var/lib/ceph/mon/ceph-c -- . > /tmp/tmp.anSO4ROK7e
2026-04-01T02:59:22.120 DEBUG:teuthology.orchestra.run.vm03:> sudo chmod 0666 /tmp/tmp.anSO4ROK7e
2026-04-01T02:59:22.196 DEBUG:teuthology.orchestra.remote:vm03:/tmp/tmp.anSO4ROK7e is 542KB
2026-04-01T02:59:22.254 DEBUG:teuthology.orchestra.run.vm03:> rm -fr /tmp/tmp.anSO4ROK7e
2026-04-01T02:59:22.267 DEBUG:teuthology.misc:Transferring archived files from vm06:/var/lib/ceph/mon/ceph-b to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/data/mon.b.tgz
2026-04-01T02:59:22.268 DEBUG:teuthology.orchestra.run.vm06:> mktemp
2026-04-01T02:59:22.283 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-04-01T02:59:22.283 INFO:teuthology.orchestra.run.vm06.stderr:mktemp: failed to create file via template ‘/tmp/tmp.XXXXXXXXXX’: No space left on device
2026-04-01T02:59:22.323 INFO:teuthology.util.scanner:summary_data or yaml_file is empty!
2026-04-01T02:59:22.337 INFO:teuthology.util.scanner:summary_data or yaml_file is empty!
2026-04-01T02:59:22.353 INFO:teuthology.util.scanner:summary_data or yaml_file is empty!
2026-04-01T02:59:22.353 INFO:tasks.ceph:Archiving crash dumps...
2026-04-01T02:59:22.353 DEBUG:teuthology.misc:Transferring archived files from vm03:/var/lib/ceph/crash to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/remote/vm03/crash
2026-04-01T02:59:22.353 DEBUG:teuthology.orchestra.run.vm03:> sudo tar c -f - -C /var/lib/ceph/crash -- .
2026-04-01T02:59:22.384 DEBUG:teuthology.misc:Transferring archived files from vm06:/var/lib/ceph/crash to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/remote/vm06/crash
2026-04-01T02:59:22.384 DEBUG:teuthology.orchestra.run.vm06:> sudo tar c -f - -C /var/lib/ceph/crash -- .
2026-04-01T02:59:22.408 DEBUG:teuthology.misc:Transferring archived files from vm08:/var/lib/ceph/crash to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/remote/vm08/crash
2026-04-01T02:59:22.408 DEBUG:teuthology.orchestra.run.vm08:> sudo tar c -f - -C /var/lib/ceph/crash -- .
2026-04-01T02:59:22.436 INFO:tasks.ceph:Compressing logs...
2026-04-01T02:59:22.436 DEBUG:teuthology.orchestra.run.vm03:> time sudo find /var/log/ceph -name '*.log' -print0 | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- gzip -5 --verbose -- 2026-04-01T02:59:22.438 DEBUG:teuthology.orchestra.run.vm06:> time sudo find /var/log/ceph -name '*.log' -print0 | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- gzip -5 --verbose -- 2026-04-01T02:59:22.450 DEBUG:teuthology.orchestra.run.vm08:> time sudo find /var/log/ceph -name '*.log' -print0 | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- gzip -5 --verbose -- 2026-04-01T02:59:22.459 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph.tmp-client.admin.55972.log 2026-04-01T02:59:22.459 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-osd.0.log 2026-04-01T02:59:22.459 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph.tmp-client.admin.55972.log: gzip -5 --verbose -- /var/log/ceph/ceph-osd.1.log 2026-04-01T02:59:22.459 INFO:teuthology.orchestra.run.vm03.stderr: 0.0% -- replaced with /var/log/ceph/ceph.tmp-client.admin.55972.log.gz 2026-04-01T02:59:22.459 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-osd.2.log 2026-04-01T02:59:22.459 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-osd.0.log: gzip -5 --verbose -- /var/log/ceph/ceph-osd.3.log 2026-04-01T02:59:22.471 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-osd.4.log 2026-04-01T02:59:22.472 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-osd.5.log 2026-04-01T02:59:22.472 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-osd.6.log 2026-04-01T02:59:22.472 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-osd.4.log.gz: No space left on device 2026-04-01T02:59:22.472 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-osd.5.log.gz: No space left on device 2026-04-01T02:59:22.473 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-osd.7.log 2026-04-01T02:59:22.473 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-osd.6.log.gz: No space left on device 2026-04-01T02:59:22.473 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-mon.b.log 2026-04-01T02:59:22.473 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-osd.7.log.gz: No space left on device 2026-04-01T02:59:22.473 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph.log 2026-04-01T02:59:22.473 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-mon.b.log.gz: No space left on device 2026-04-01T02:59:22.473 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-mgr.x.log 2026-04-01T02:59:22.474 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph.log.gz: No space left on device 2026-04-01T02:59:22.474 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph.audit.log 2026-04-01T02:59:22.474 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-mgr.x.log.gz: No space left on device 2026-04-01T02:59:22.474 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59130.log 2026-04-01T02:59:22.474 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph.audit.log.gz: No space left on device 2026-04-01T02:59:22.474 
INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59178.log 2026-04-01T02:59:22.475 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59130.log.gz: No space left on device 2026-04-01T02:59:22.475 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59226.log 2026-04-01T02:59:22.475 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59178.log.gz: No space left on device 2026-04-01T02:59:22.475 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59274.log 2026-04-01T02:59:22.475 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59226.log.gz: No space left on device 2026-04-01T02:59:22.475 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59322.log 2026-04-01T02:59:22.475 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59274.log.gz: No space left on device 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59370.log 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-osd.1.log: /var/log/ceph/ceph-osd.2.log: gzip -5 --verbose -- /var/log/ceph/ceph-mon.a.log 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59322.log.gz: No space left on device 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-mon.c.log 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59418.log 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.59544.log 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59370.log.gz: No space left on device 2026-04-01T02:59:22.476 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-osd.3.log: /var/log/ceph/ceph-mon.a.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.62409.log 2026-04-01T02:59:22.477 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59418.log.gz: No space left on device 2026-04-01T02:59:22.477 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.63142.log 2026-04-01T02:59:22.477 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.59544.log.gz: No space left on device 2026-04-01T02:59:22.477 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.63190.log 2026-04-01T02:59:22.477 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.63142.log.gz: No space left on device 2026-04-01T02:59:22.477 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.63238.log 2026-04-01T02:59:22.477 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.63190.log.gz: No space left on device 2026-04-01T02:59:22.478 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.63286.log 2026-04-01T02:59:22.478 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.63238.log.gz: No space left on device 2026-04-01T02:59:22.478 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- 
/var/log/ceph/ceph-client.1.63309.log 2026-04-01T02:59:22.478 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.admin.63286.log.gz: No space left on device 2026-04-01T02:59:22.478 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.1.63417.log 2026-04-01T02:59:22.479 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.1.63309.log.gz: No space left on device 2026-04-01T02:59:22.479 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.1.63520.log 2026-04-01T02:59:22.479 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.1.63417.log.gz: No space left on device 2026-04-01T02:59:22.479 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.1.63623.log 2026-04-01T02:59:22.479 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.1.63520.log.gz: No space left on device 2026-04-01T02:59:22.479 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.1.63726.log 2026-04-01T02:59:22.479 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.1.63623.log.gz: No space left on device 2026-04-01T02:59:22.480 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/rgw.ceph.client.1.log 2026-04-01T02:59:22.480 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ceph-client.1.63726.log.gz: No space left on device 2026-04-01T02:59:22.480 INFO:teuthology.orchestra.run.vm06.stderr:gzip -5 --verbose -- /var/log/ceph/ops-log-ceph-client.1.log 2026-04-01T02:59:22.480 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/rgw.ceph.client.1.log.gz: No space left on device 2026-04-01T02:59:22.480 INFO:teuthology.orchestra.run.vm06.stderr:gzip: /var/log/ceph/ops-log-ceph-client.1.log.gz: No space left on device 2026-04-01T02:59:22.481 INFO:teuthology.orchestra.run.vm06.stderr: 2026-04-01T02:59:22.481 INFO:teuthology.orchestra.run.vm06.stderr:real 0m0.019s 2026-04-01T02:59:22.481 INFO:teuthology.orchestra.run.vm06.stderr:user 0m0.014s 2026-04-01T02:59:22.481 INFO:teuthology.orchestra.run.vm06.stderr:sys 0m0.017s 2026-04-01T02:59:22.503 INFO:teuthology.orchestra.run.vm08.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.51373.log 2026-04-01T02:59:22.503 INFO:teuthology.orchestra.run.vm08.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.51421.log 2026-04-01T02:59:22.503 INFO:teuthology.orchestra.run.vm08.stderr:gzip/var/log/ceph/ceph-client.admin.51373.log: -5 --verbose -- /var/log/ceph/ceph-client.admin.51469.log 2026-04-01T02:59:22.503 INFO:teuthology.orchestra.run.vm08.stderr: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.51373.log.gz 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-mon.c.log: gzip -5 --verbose -- /var/log/ceph/ceph.log 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.admin.51421.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.51421.log.gz 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm08.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.51517.log 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.62409.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.62409.log.gz 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.admin.51469.log: 0.0% -- replaced with 
/var/log/ceph/ceph-client.admin.51469.log.gz 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm08.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.2.51540.log 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.admin.51517.log: gzip -5 --verbose 0.0% -- replaced with /var/log/ceph/ceph-client.admin.51517.log.gz 2026-04-01T02:59:22.504 INFO:teuthology.orchestra.run.vm08.stderr: -- /var/log/ceph/ceph-client.2.51648.log 2026-04-01T02:59:22.505 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.2.51540.log: 82.9% -- replaced with /var/log/ceph/ceph-client.2.51540.log.gz 2026-04-01T02:59:22.505 INFO:teuthology.orchestra.run.vm08.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.2.51751.log 2026-04-01T02:59:22.505 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.2.51648.log: gzip -5 --verbose -- 45.3% -- replaced with /var/log/ceph/ceph-client.2.51648.log.gz 2026-04-01T02:59:22.505 INFO:teuthology.orchestra.run.vm08.stderr: /var/log/ceph/ceph-client.2.51854.log 2026-04-01T02:59:22.505 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.2.51751.log: 44.2% -- replaced with /var/log/ceph/ceph-client.2.51751.log.gz 2026-04-01T02:59:22.505 INFO:teuthology.orchestra.run.vm08.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.2.51957.log 2026-04-01T02:59:22.506 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.2.51854.log: gzip 45.3% -- replaced with /var/log/ceph/ceph-client.2.51854.log.gz 2026-04-01T02:59:22.506 INFO:teuthology.orchestra.run.vm08.stderr: -5 --verbose -- /var/log/ceph/rgw.ceph.client.2.log 2026-04-01T02:59:22.506 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/ceph-client.2.51957.log: 43.9% -- replaced with /var/log/ceph/ceph-client.2.51957.log.gz 2026-04-01T02:59:22.506 INFO:teuthology.orchestra.run.vm08.stderr:gzip -5 --verbose -- /var/log/ceph/ops-log-ceph-client.2.log 2026-04-01T02:59:22.507 INFO:teuthology.orchestra.run.vm08.stderr:/var/log/ceph/rgw.ceph.client.2.log: /var/log/ceph/ops-log-ceph-client.2.log: 35.3% -- replaced with /var/log/ceph/ops-log-ceph-client.2.log.gz 2026-04-01T02:59:22.515 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-mgr.y.log 2026-04-01T02:59:22.516 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph.log: 92.8% -- replaced with /var/log/ceph/ceph.log.gz 2026-04-01T02:59:22.522 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.62492.log 2026-04-01T02:59:22.531 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-mgr.y.log: gzip -5 --verbose -- /var/log/ceph/ceph.audit.log 2026-04-01T02:59:22.532 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.62492.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.62492.log.gz 2026-04-01T02:59:22.532 INFO:teuthology.orchestra.run.vm03.stderr: 94.5% -- replaced with /var/log/ceph/ceph-mgr.y.log.gz 2026-04-01T02:59:22.535 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.62756.log 2026-04-01T02:59:22.535 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph.audit.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.65981.log 2026-04-01T02:59:22.537 INFO:teuthology.orchestra.run.vm03.stderr: 94.5% -- replaced with /var/log/ceph/ceph.audit.log.gz 2026-04-01T02:59:22.537 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.62756.log: 0.0% -- replaced with 
/var/log/ceph/ceph-client.admin.62756.log.gz 2026-04-01T02:59:22.537 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66089.log 2026-04-01T02:59:22.537 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.65981.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.65981.log.gz 2026-04-01T02:59:22.538 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66161.log 2026-04-01T02:59:22.538 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66089.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66089.log.gz 2026-04-01T02:59:22.555 INFO:teuthology.orchestra.run.vm08.stderr: 91.3% -- replaced with /var/log/ceph/rgw.ceph.client.2.log.gz 2026-04-01T02:59:22.556 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66186.log 2026-04-01T02:59:22.557 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66161.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66161.log.gz 2026-04-01T02:59:22.557 INFO:teuthology.orchestra.run.vm08.stderr: 2026-04-01T02:59:22.557 INFO:teuthology.orchestra.run.vm08.stderr:real 0m0.064s 2026-04-01T02:59:22.557 INFO:teuthology.orchestra.run.vm08.stderr:user 0m0.054s 2026-04-01T02:59:22.557 INFO:teuthology.orchestra.run.vm08.stderr:sys 0m0.021s 2026-04-01T02:59:22.566 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66282.log 2026-04-01T02:59:22.567 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66186.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66186.log.gz 2026-04-01T02:59:22.572 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66332.log 2026-04-01T02:59:22.573 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66282.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66282.log.gz 2026-04-01T02:59:22.582 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66382.log 2026-04-01T02:59:22.583 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66332.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66332.log.gz 2026-04-01T02:59:22.587 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66432.log 2026-04-01T02:59:22.589 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66382.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66382.log.gz 2026-04-01T02:59:22.597 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66674.log 2026-04-01T02:59:22.598 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66432.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66432.log.gz 2026-04-01T02:59:22.604 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66664.log 2026-04-01T02:59:22.604 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66674.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66674.log.gz 2026-04-01T02:59:22.619 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66678.log 2026-04-01T02:59:22.619 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66664.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66664.log.gz 2026-04-01T02:59:22.625 
INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66675.log 2026-04-01T02:59:22.625 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66678.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66678.log.gz 2026-04-01T02:59:22.634 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66670.log 2026-04-01T02:59:22.635 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66675.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66675.log.gz 2026-04-01T02:59:22.640 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66661.log 2026-04-01T02:59:22.641 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66670.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66670.log.gz 2026-04-01T02:59:22.650 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66663.log 2026-04-01T02:59:22.651 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66661.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66661.log.gz 2026-04-01T02:59:22.656 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66667.log 2026-04-01T02:59:22.657 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66663.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66663.log.gz 2026-04-01T02:59:22.672 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67007.log 2026-04-01T02:59:22.672 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66667.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66667.log.gz 2026-04-01T02:59:22.686 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67021.log 2026-04-01T02:59:22.687 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67007.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67007.log.gz 2026-04-01T02:59:22.692 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67022.log 2026-04-01T02:59:22.695 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67021.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67021.log.gz 2026-04-01T02:59:22.700 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.66983.log 2026-04-01T02:59:22.701 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67022.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67022.log.gz 2026-04-01T02:59:22.707 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67017.log 2026-04-01T02:59:22.707 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.66983.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.66983.log.gz 2026-04-01T02:59:22.714 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67020.log 2026-04-01T02:59:22.715 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67017.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67017.log.gz 2026-04-01T02:59:22.734 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67128.log 2026-04-01T02:59:22.734 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67020.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67020.log.gz 2026-04-01T02:59:22.745 INFO:teuthology.orchestra.run.vm03.stderr: 92.8% -- replaced with /var/log/ceph/ceph-mon.c.log.gz 2026-04-01T02:59:22.746 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67116.log 2026-04-01T02:59:22.747 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67128.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67128.log.gz 2026-04-01T02:59:22.755 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67282.log 2026-04-01T02:59:22.756 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67116.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67116.log.gz 2026-04-01T02:59:22.770 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67330.log 2026-04-01T02:59:22.771 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67282.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67282.log.gz 2026-04-01T02:59:22.785 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67392.log 2026-04-01T02:59:22.786 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67330.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67330.log.gz 2026-04-01T02:59:22.800 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67405.log 2026-04-01T02:59:22.801 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67392.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67392.log.gz 2026-04-01T02:59:22.814 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67508.log 2026-04-01T02:59:22.815 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67405.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67405.log.gz 2026-04-01T02:59:22.824 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67512.log 2026-04-01T02:59:22.824 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67508.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67508.log.gz 2026-04-01T02:59:22.830 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67528.log 2026-04-01T02:59:22.831 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67512.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67512.log.gz 2026-04-01T02:59:22.837 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67516.log 2026-04-01T02:59:22.838 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67528.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67528.log.gz 2026-04-01T02:59:22.844 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67682.log 2026-04-01T02:59:22.845 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67516.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67516.log.gz 2026-04-01T02:59:22.852 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67732.log 2026-04-01T02:59:22.853 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67682.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67682.log.gz 2026-04-01T02:59:22.861 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67782.log 2026-04-01T02:59:22.862 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67732.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67732.log.gz 2026-04-01T02:59:22.869 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67830.log 2026-04-01T02:59:22.870 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67782.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67782.log.gz 2026-04-01T02:59:22.879 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67881.log 2026-04-01T02:59:22.879 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67830.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67830.log.gz 2026-04-01T02:59:22.885 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67929.log 2026-04-01T02:59:22.886 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67881.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67881.log.gz 2026-04-01T02:59:22.892 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.67979.log 2026-04-01T02:59:22.893 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67929.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67929.log.gz 2026-04-01T02:59:22.899 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.0.68002.log 2026-04-01T02:59:22.900 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.67979.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.67979.log.gz 2026-04-01T02:59:22.910 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.0.68118.log 2026-04-01T02:59:22.911 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.68002.log: 95.1% -- replaced with /var/log/ceph/ceph-client.0.68002.log.gz 2026-04-01T02:59:22.917 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.0.68221.log 2026-04-01T02:59:22.920 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.68118.log: 45.6% -- replaced with /var/log/ceph/ceph-client.0.68118.log.gz 2026-04-01T02:59:22.925 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.0.68324.log 2026-04-01T02:59:22.926 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.68221.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.0.68427.log 2026-04-01T02:59:22.934 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.68324.log: 45.6% -- replaced with /var/log/ceph/ceph-client.0.68221.log.gz 2026-04-01T02:59:22.936 INFO:teuthology.orchestra.run.vm03.stderr: 45.6% -- replaced with /var/log/ceph/ceph-client.0.68324.log.gz 2026-04-01T02:59:22.939 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/rgw.ceph.client.0.log 2026-04-01T02:59:22.942 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.68427.log: 43.9% -- replaced with /var/log/ceph/ceph-client.0.68427.log.gz 2026-04-01T02:59:22.951 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- 
/var/log/ceph/ops-log-ceph-client.0.log 2026-04-01T02:59:22.956 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/rgw.ceph.client.0.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.0.69408.log 2026-04-01T02:59:22.962 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ops-log-ceph-client.0.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.69600.log 2026-04-01T02:59:22.963 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.69408.log: 84.0% -- replaced with /var/log/ceph/ceph-client.0.69408.log.gz 2026-04-01T02:59:22.978 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.69741.log 2026-04-01T02:59:22.980 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.69600.log: 83.5% -- replaced with /var/log/ceph/ceph-client.admin.69600.log.gz 2026-04-01T02:59:22.995 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.69775.log 2026-04-01T02:59:22.996 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.69741.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.69741.log.gz 2026-04-01T02:59:23.011 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.69809.log 2026-04-01T02:59:23.012 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.69775.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.69775.log.gz 2026-04-01T02:59:23.025 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.69906.log 2026-04-01T02:59:23.027 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.69809.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.69809.log.gz 2026-04-01T02:59:23.039 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70003.log 2026-04-01T02:59:23.040 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.69906.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.69906.log.gz 2026-04-01T02:59:23.052 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70180.log 2026-04-01T02:59:23.054 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70003.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.70003.log.gz 2026-04-01T02:59:23.064 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70214.log 2026-04-01T02:59:23.067 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70180.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.70180.log.gz 2026-04-01T02:59:23.068 INFO:teuthology.orchestra.run.vm03.stderr: 91.3% -- replaced with /var/log/ceph/ceph-mon.a.log.gz 2026-04-01T02:59:23.075 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70248.log 2026-04-01T02:59:23.076 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70214.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.70214.log.gz 2026-04-01T02:59:23.076 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70359.log 2026-04-01T02:59:23.080 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70248.log: 82.5% -- replaced with /var/log/ceph/ceph-client.admin.70248.log.gz 2026-04-01T02:59:23.088 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70457.log 
2026-04-01T02:59:23.089 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70359.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.70359.log.gz 2026-04-01T02:59:23.094 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70554.log 2026-04-01T02:59:23.100 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70457.log: 93.2% -- replaced with /var/log/ceph/ceph-client.admin.70457.log.gz 2026-04-01T02:59:23.101 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70588.log 2026-04-01T02:59:23.102 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70554.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.70554.log.gz 2026-04-01T02:59:23.115 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70624.log 2026-04-01T02:59:23.115 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70588.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.70588.log.gz 2026-04-01T02:59:23.129 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.70658.log 2026-04-01T02:59:23.130 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70624.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.70624.log.gz 2026-04-01T02:59:23.136 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71458.log 2026-04-01T02:59:23.137 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.70658.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.70658.log.gz 2026-04-01T02:59:23.142 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71492.log 2026-04-01T02:59:23.143 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71458.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.71458.log.gz 2026-04-01T02:59:23.150 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71526.log 2026-04-01T02:59:23.151 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71492.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.71492.log.gz 2026-04-01T02:59:23.159 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71629.log 2026-04-01T02:59:23.160 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71526.log: 82.9% -- replaced with /var/log/ceph/ceph-client.admin.71526.log.gz 2026-04-01T02:59:23.164 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71727.log 2026-04-01T02:59:23.166 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71629.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.71629.log.gz 2026-04-01T02:59:23.175 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71824.log 2026-04-01T02:59:23.179 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71727.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71858.log 2026-04-01T02:59:23.183 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71824.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.71824.log.gz 2026-04-01T02:59:23.191 INFO:teuthology.orchestra.run.vm03.stderr: 96.8% -- replaced with /var/log/ceph/ceph-client.admin.71727.log.gz 
2026-04-01T02:59:23.192 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71893.log 2026-04-01T02:59:23.193 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71858.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.71858.log.gz 2026-04-01T02:59:23.207 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.71927.log 2026-04-01T02:59:23.207 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71893.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.71893.log.gz 2026-04-01T02:59:23.212 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72022.log 2026-04-01T02:59:23.213 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.71927.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.71927.log.gz 2026-04-01T02:59:23.221 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72056.log 2026-04-01T02:59:23.222 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72022.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72022.log.gz 2026-04-01T02:59:23.226 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72090.log 2026-04-01T02:59:23.227 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72056.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72056.log.gz 2026-04-01T02:59:23.238 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72193.log 2026-04-01T02:59:23.239 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72090.log: 83.0% -- replaced with /var/log/ceph/ceph-client.admin.72090.log.gz 2026-04-01T02:59:23.244 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72291.log 2026-04-01T02:59:23.246 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72193.log: 84.6% -- replaced with /var/log/ceph/ceph-client.admin.72193.log.gz 2026-04-01T02:59:23.252 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72388.log 2026-04-01T02:59:23.253 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72291.log: 92.1% -- replaced with /var/log/ceph/ceph-client.admin.72291.log.gz 2026-04-01T02:59:23.257 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72422.log 2026-04-01T02:59:23.260 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72388.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72388.log.gz 2026-04-01T02:59:23.267 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72458.log 2026-04-01T02:59:23.268 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72422.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72422.log.gz 2026-04-01T02:59:23.272 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72492.log 2026-04-01T02:59:23.274 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72458.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72458.log.gz 2026-04-01T02:59:23.285 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72698.log 2026-04-01T02:59:23.286 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72492.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72492.log.gz 2026-04-01T02:59:23.290 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72732.log 2026-04-01T02:59:23.292 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72698.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72698.log.gz 2026-04-01T02:59:23.300 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72766.log 2026-04-01T02:59:23.301 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72732.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.72732.log.gz 2026-04-01T02:59:23.305 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72869.log 2026-04-01T02:59:23.308 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72766.log: 82.9% -- replaced with /var/log/ceph/ceph-client.admin.72766.log.gz 2026-04-01T02:59:23.312 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.72967.log 2026-04-01T02:59:23.317 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72869.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73064.log 2026-04-01T02:59:23.320 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.72967.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.72869.log.gz 2026-04-01T02:59:23.322 INFO:teuthology.orchestra.run.vm03.stderr: 89.5% -- replaced with /var/log/ceph/ceph-client.admin.72967.log.gz 2026-04-01T02:59:23.339 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73098.log 2026-04-01T02:59:23.339 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73064.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73064.log.gz 2026-04-01T02:59:23.344 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73134.log 2026-04-01T02:59:23.345 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73098.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73098.log.gz 2026-04-01T02:59:23.355 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73168.log 2026-04-01T02:59:23.356 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73134.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73134.log.gz 2026-04-01T02:59:23.360 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73288.log 2026-04-01T02:59:23.361 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73168.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73168.log.gz 2026-04-01T02:59:23.372 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73322.log 2026-04-01T02:59:23.373 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73288.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73288.log.gz 2026-04-01T02:59:23.378 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73356.log 2026-04-01T02:59:23.379 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73322.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73322.log.gz 2026-04-01T02:59:23.387 
INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73459.log 2026-04-01T02:59:23.388 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73356.log: 82.8% -- replaced with /var/log/ceph/ceph-client.admin.73356.log.gz 2026-04-01T02:59:23.392 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73557.log 2026-04-01T02:59:23.393 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73459.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.73459.log.gz 2026-04-01T02:59:23.403 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73654.log 2026-04-01T02:59:23.404 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73557.log: 93.2% -- replaced with /var/log/ceph/ceph-client.admin.73557.log.gz 2026-04-01T02:59:23.409 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73688.log 2026-04-01T02:59:23.410 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73654.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73654.log.gz 2026-04-01T02:59:23.419 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73723.log 2026-04-01T02:59:23.420 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73688.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73688.log.gz 2026-04-01T02:59:23.424 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.73757.log 2026-04-01T02:59:23.425 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73723.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73723.log.gz 2026-04-01T02:59:23.436 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74545.log 2026-04-01T02:59:23.436 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.73757.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.73757.log.gz 2026-04-01T02:59:23.440 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74579.log 2026-04-01T02:59:23.441 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74545.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.74545.log.gz 2026-04-01T02:59:23.451 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74613.log 2026-04-01T02:59:23.452 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74579.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.74579.log.gz 2026-04-01T02:59:23.456 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74716.log 2026-04-01T02:59:23.457 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74613.log: 83.1% -- replaced with /var/log/ceph/ceph-client.admin.74613.log.gz 2026-04-01T02:59:23.467 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74814.log 2026-04-01T02:59:23.468 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74716.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.74716.log.gz 2026-04-01T02:59:23.472 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74911.log 2026-04-01T02:59:23.475 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74814.log: 90.3% -- replaced with /var/log/ceph/ceph-client.admin.74814.log.gz 2026-04-01T02:59:23.482 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74945.log 2026-04-01T02:59:23.483 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74911.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.74911.log.gz 2026-04-01T02:59:23.487 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.74981.log 2026-04-01T02:59:23.489 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74945.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.74945.log.gz 2026-04-01T02:59:23.500 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75015.log 2026-04-01T02:59:23.500 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.74981.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.74981.log.gz 2026-04-01T02:59:23.505 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75049.log 2026-04-01T02:59:23.506 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75015.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.75015.log.gz 2026-04-01T02:59:23.515 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75146.log 2026-04-01T02:59:23.516 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75049.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.75049.log.gz 2026-04-01T02:59:23.521 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75243.log 2026-04-01T02:59:23.522 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75146.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.75146.log.gz 2026-04-01T02:59:23.532 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75594.log 2026-04-01T02:59:23.533 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75243.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.75243.log.gz 2026-04-01T02:59:23.538 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75628.log 2026-04-01T02:59:23.539 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75594.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.75594.log.gz 2026-04-01T02:59:23.547 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75662.log 2026-04-01T02:59:23.548 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75628.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.75628.log.gz 2026-04-01T02:59:23.553 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75765.log 2026-04-01T02:59:23.554 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75662.log: 83.0% -- replaced with /var/log/ceph/ceph-client.admin.75662.log.gz 2026-04-01T02:59:23.565 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75863.log 2026-04-01T02:59:23.565 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75765.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.75765.log.gz 2026-04-01T02:59:23.570 
INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75960.log 2026-04-01T02:59:23.572 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75863.log: 94.1% -- replaced with /var/log/ceph/ceph-client.admin.75863.log.gz 2026-04-01T02:59:23.582 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.75994.log 2026-04-01T02:59:23.583 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75960.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.75960.log.gz 2026-04-01T02:59:23.587 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.76030.log 2026-04-01T02:59:23.589 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.75994.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.75994.log.gz 2026-04-01T02:59:23.596 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.76064.log 2026-04-01T02:59:23.597 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.76030.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.76030.log.gz 2026-04-01T02:59:23.602 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.76098.log 2026-04-01T02:59:23.603 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.76064.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.76064.log.gz 2026-04-01T02:59:23.613 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.76195.log 2026-04-01T02:59:23.614 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.76098.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.76098.log.gz 2026-04-01T02:59:23.620 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.76292.log 2026-04-01T02:59:23.622 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.76195.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.76195.log.gz 2026-04-01T02:59:23.627 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.76976.log 2026-04-01T02:59:23.633 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.76292.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77010.log 2026-04-01T02:59:23.634 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.76976.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.76976.log.gz 2026-04-01T02:59:23.635 INFO:teuthology.orchestra.run.vm03.stderr: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.76292.log.gz 2026-04-01T02:59:23.653 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77044.log 2026-04-01T02:59:23.654 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77010.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.77010.log.gz 2026-04-01T02:59:23.659 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77147.log 2026-04-01T02:59:23.661 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77044.log: 83.0% -- replaced with /var/log/ceph/ceph-client.admin.77044.log.gz 2026-04-01T02:59:23.668 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77245.log 2026-04-01T02:59:23.669 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77147.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.77147.log.gz 2026-04-01T02:59:23.673 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77342.log 2026-04-01T02:59:23.680 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77245.log: 96.4% -- replaced with /var/log/ceph/ceph-client.admin.77245.log.gz 2026-04-01T02:59:23.684 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77376.log 2026-04-01T02:59:23.685 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77342.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.77342.log.gz 2026-04-01T02:59:23.689 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77412.log 2026-04-01T02:59:23.693 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77376.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.77376.log.gz 2026-04-01T02:59:23.700 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77446.log 2026-04-01T02:59:23.701 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77412.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.77412.log.gz 2026-04-01T02:59:23.706 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77480.log 2026-04-01T02:59:23.707 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77446.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.77446.log.gz 2026-04-01T02:59:23.715 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77577.log 2026-04-01T02:59:23.716 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77480.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.77480.log.gz 2026-04-01T02:59:23.720 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77674.log 2026-04-01T02:59:23.723 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77577.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.77577.log.gz 2026-04-01T02:59:23.729 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.77771.log 2026-04-01T02:59:23.733 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77674.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78044.log 2026-04-01T02:59:23.734 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.77771.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.77771.log.gz 2026-04-01T02:59:23.735 INFO:teuthology.orchestra.run.vm03.stderr: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.77674.log.gz 2026-04-01T02:59:23.751 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78078.log 2026-04-01T02:59:23.751 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78044.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.78044.log.gz 2026-04-01T02:59:23.755 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78112.log 2026-04-01T02:59:23.762 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78078.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78216.log 2026-04-01T02:59:23.764 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78112.log: 82.8% -- replaced with /var/log/ceph/ceph-client.admin.78112.log.gz 2026-04-01T02:59:23.765 INFO:teuthology.orchestra.run.vm03.stderr: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.78078.log.gz 2026-04-01T02:59:23.783 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78314.log 2026-04-01T02:59:23.787 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78216.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78411.log 2026-04-01T02:59:23.789 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78314.log: 84.8% -- replaced with /var/log/ceph/ceph-client.admin.78216.log.gz 2026-04-01T02:59:23.791 INFO:teuthology.orchestra.run.vm03.stderr: 89.5% -- replaced with /var/log/ceph/ceph-client.admin.78314.log.gz 2026-04-01T02:59:23.807 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78445.log 2026-04-01T02:59:23.807 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78411.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.78411.log.gz 2026-04-01T02:59:23.812 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78481.log 2026-04-01T02:59:23.813 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78445.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.78445.log.gz 2026-04-01T02:59:23.822 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78515.log 2026-04-01T02:59:23.822 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78481.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.78481.log.gz 2026-04-01T02:59:23.828 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78549.log 2026-04-01T02:59:23.828 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78515.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.78515.log.gz 2026-04-01T02:59:23.839 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78646.log 2026-04-01T02:59:23.839 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78549.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.78549.log.gz 2026-04-01T02:59:23.846 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78743.log 2026-04-01T02:59:23.847 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78646.log: 85.3% -- replaced with /var/log/ceph/ceph-client.admin.78646.log.gz 2026-04-01T02:59:23.855 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78840.log 2026-04-01T02:59:23.856 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78743.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.78743.log.gz 2026-04-01T02:59:23.867 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.78937.log 2026-04-01T02:59:23.867 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78840.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.78840.log.gz 2026-04-01T02:59:23.876 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79034.log 2026-04-01T02:59:23.877 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.78937.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.78937.log.gz 2026-04-01T02:59:23.886 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79131.log 2026-04-01T02:59:23.887 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79034.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.79034.log.gz 2026-04-01T02:59:23.896 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79228.log 2026-04-01T02:59:23.897 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79131.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.79131.log.gz 2026-04-01T02:59:23.906 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79325.log 2026-04-01T02:59:23.907 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79228.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.79228.log.gz 2026-04-01T02:59:23.917 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79422.log 2026-04-01T02:59:23.917 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79325.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.79325.log.gz 2026-04-01T02:59:23.922 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79519.log 2026-04-01T02:59:23.931 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79422.log: 84.8% -- replaced with /var/log/ceph/ceph-client.admin.79422.log.gz 2026-04-01T02:59:23.932 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79616.log 2026-04-01T02:59:23.933 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79519.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.79519.log.gz 2026-04-01T02:59:23.937 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79713.log 2026-04-01T02:59:23.948 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79616.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.79616.log.gz 2026-04-01T02:59:23.948 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79810.log 2026-04-01T02:59:23.948 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79713.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.79713.log.gz 2026-04-01T02:59:23.962 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.79907.log 2026-04-01T02:59:23.962 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79810.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.79810.log.gz 2026-04-01T02:59:23.972 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80004.log 2026-04-01T02:59:23.972 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.79907.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.79907.log.gz 2026-04-01T02:59:23.976 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80101.log 2026-04-01T02:59:23.981 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80004.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.80004.log.gz 2026-04-01T02:59:23.986 
INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80198.log 2026-04-01T02:59:23.987 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80101.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.80101.log.gz 2026-04-01T02:59:23.996 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80295.log 2026-04-01T02:59:23.996 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80198.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.80198.log.gz 2026-04-01T02:59:24.011 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80392.log 2026-04-01T02:59:24.011 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80295.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.80295.log.gz 2026-04-01T02:59:24.015 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80489.log 2026-04-01T02:59:24.026 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80392.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.80392.log.gz 2026-04-01T02:59:24.026 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80586.log 2026-04-01T02:59:24.026 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80489.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.80489.log.gz 2026-04-01T02:59:24.031 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80683.log 2026-04-01T02:59:24.037 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80586.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.80586.log.gz 2026-04-01T02:59:24.041 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80780.log 2026-04-01T02:59:24.042 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80683.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.80683.log.gz 2026-04-01T02:59:24.052 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80877.log 2026-04-01T02:59:24.052 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80780.log: 85.3% -- replaced with /var/log/ceph/ceph-client.admin.80780.log.gz 2026-04-01T02:59:24.062 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.80974.log 2026-04-01T02:59:24.062 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80877.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.80877.log.gz 2026-04-01T02:59:24.066 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81071.log 2026-04-01T02:59:24.077 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.80974.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.80974.log.gz 2026-04-01T02:59:24.077 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81168.log 2026-04-01T02:59:24.077 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81071.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.81071.log.gz 2026-04-01T02:59:24.083 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81265.log 2026-04-01T02:59:24.085 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81168.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.81168.log.gz 2026-04-01T02:59:24.092 INFO:teuthology.orchestra.run.vm03.stderr: 92.3% -- replaced with /var/log/ceph/ops-log-ceph-client.0.log.gz 2026-04-01T02:59:24.092 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81362.log 2026-04-01T02:59:24.093 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81265.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.81265.log.gz 2026-04-01T02:59:24.093 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81459.log 2026-04-01T02:59:24.094 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81362.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.81362.log.gz 2026-04-01T02:59:24.094 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81556.log 2026-04-01T02:59:24.094 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81459.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.81459.log.gz 2026-04-01T02:59:24.094 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81653.log 2026-04-01T02:59:24.095 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81556.log: 85.3% -- replaced with /var/log/ceph/ceph-client.admin.81556.log.gz 2026-04-01T02:59:24.095 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81750.log 2026-04-01T02:59:24.095 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81653.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.81653.log.gz 2026-04-01T02:59:24.096 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81847.log 2026-04-01T02:59:24.096 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81750.log: 85.3% -- replaced with /var/log/ceph/ceph-client.admin.81750.log.gz 2026-04-01T02:59:24.096 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.81944.log 2026-04-01T02:59:24.097 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81847.log: 84.9% -- replaced with /var/log/ceph/ceph-client.admin.81847.log.gz 2026-04-01T02:59:24.097 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82041.log 2026-04-01T02:59:24.097 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.81944.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.81944.log.gz 2026-04-01T02:59:24.097 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82138.log 2026-04-01T02:59:24.098 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82041.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.82041.log.gz 2026-04-01T02:59:24.098 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82235.log 2026-04-01T02:59:24.098 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82138.log: 85.3% -- replaced with /var/log/ceph/ceph-client.admin.82138.log.gz 2026-04-01T02:59:24.099 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82332.log 2026-04-01T02:59:24.099 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82235.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.82235.log.gz 2026-04-01T02:59:24.099 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82429.log 2026-04-01T02:59:24.099 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82332.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.82332.log.gz 2026-04-01T02:59:24.100 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82526.log 2026-04-01T02:59:24.100 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82429.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.82429.log.gz 2026-04-01T02:59:24.100 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82623.log 2026-04-01T02:59:24.101 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82526.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.82526.log.gz 2026-04-01T02:59:24.101 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82720.log 2026-04-01T02:59:24.101 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82623.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.82623.log.gz 2026-04-01T02:59:24.101 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82817.log 2026-04-01T02:59:24.102 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82720.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.82720.log.gz 2026-04-01T02:59:24.102 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.82914.log 2026-04-01T02:59:24.102 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82817.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.82817.log.gz 2026-04-01T02:59:24.103 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83011.log 2026-04-01T02:59:24.103 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.82914.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.82914.log.gz 2026-04-01T02:59:24.103 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83108.log 2026-04-01T02:59:24.104 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83011.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.83011.log.gz 2026-04-01T02:59:24.104 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83205.log 2026-04-01T02:59:24.104 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83108.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.83108.log.gz 2026-04-01T02:59:24.104 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83302.log 2026-04-01T02:59:24.105 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83205.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.83205.log.gz 2026-04-01T02:59:24.105 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83399.log 2026-04-01T02:59:24.105 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83302.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.83302.log.gz 2026-04-01T02:59:24.106 
INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83496.log 2026-04-01T02:59:24.106 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83399.log: 85.3% -- replaced with /var/log/ceph/ceph-client.admin.83399.log.gz 2026-04-01T02:59:24.106 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83593.log 2026-04-01T02:59:24.107 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83496.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.83496.log.gz 2026-04-01T02:59:24.107 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83690.log 2026-04-01T02:59:24.107 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83593.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.83593.log.gz 2026-04-01T02:59:24.107 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83787.log 2026-04-01T02:59:24.108 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83690.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.83690.log.gz 2026-04-01T02:59:24.108 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83884.log 2026-04-01T02:59:24.108 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83787.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.83787.log.gz 2026-04-01T02:59:24.109 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.83981.log 2026-04-01T02:59:24.109 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83884.log: 85.2% -- replaced with /var/log/ceph/ceph-client.admin.83884.log.gz 2026-04-01T02:59:24.109 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.84078.log 2026-04-01T02:59:24.109 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.83981.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.83981.log.gz 2026-04-01T02:59:24.110 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.84175.log 2026-04-01T02:59:24.110 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.84078.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.84078.log.gz 2026-04-01T02:59:24.110 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.84272.log 2026-04-01T02:59:24.111 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.84175.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.84175.log.gz 2026-04-01T02:59:24.111 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.84369.log 2026-04-01T02:59:24.111 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.84272.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.84272.log.gz 2026-04-01T02:59:24.111 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.84466.log 2026-04-01T02:59:24.112 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.84369.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.84369.log.gz 2026-04-01T02:59:24.112 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.84563.log 2026-04-01T02:59:24.112 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.84466.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.84466.log.gz 2026-04-01T02:59:24.113 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.84660.log 2026-04-01T02:59:24.113 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.84563.log: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.84563.log.gz 2026-04-01T02:59:24.113 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323529.log 2026-04-01T02:59:24.114 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.84660.log: 85.0% -- replaced with /var/log/ceph/ceph-client.admin.84660.log.gz 2026-04-01T02:59:24.114 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323563.log 2026-04-01T02:59:24.114 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323529.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.323529.log.gz 2026-04-01T02:59:24.114 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323597.log 2026-04-01T02:59:24.115 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323563.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.323563.log.gz 2026-04-01T02:59:24.115 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323700.log 2026-04-01T02:59:24.115 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323597.log: 82.8% -- replaced with /var/log/ceph/ceph-client.admin.323597.log.gz 2026-04-01T02:59:24.115 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323800.log 2026-04-01T02:59:24.116 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323700.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323897.log 2026-04-01T02:59:24.116 INFO:teuthology.orchestra.run.vm03.stderr: 85.1% -- replaced with /var/log/ceph/ceph-client.admin.323700.log.gz 2026-04-01T02:59:24.124 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323800.log: gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323931.log 2026-04-01T02:59:24.125 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323897.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.323897.log.gz 2026-04-01T02:59:24.132 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.323967.log 2026-04-01T02:59:24.133 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323931.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.323931.log.gz 2026-04-01T02:59:24.137 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.324001.log 2026-04-01T02:59:24.138 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.323967.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.323967.log.gz 2026-04-01T02:59:24.145 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.0.533080.log 2026-04-01T02:59:24.145 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.324001.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.324001.log.gz 2026-04-01T02:59:24.151 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.0.533289.log 2026-04-01T02:59:24.152 
INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.533080.log: 7.1% -- replaced with /var/log/ceph/ceph-client.0.533080.log.gz
2026-04-01T02:59:24.156 INFO:teuthology.orchestra.run.vm03.stderr:gzip -5 --verbose -- /var/log/ceph/ceph-client.admin.533352.log
2026-04-01T02:59:24.158 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.0.533289.log: 8.2% -- replaced with /var/log/ceph/ceph-client.0.533289.log.gz
2026-04-01T02:59:24.164 INFO:teuthology.orchestra.run.vm03.stderr:/var/log/ceph/ceph-client.admin.533352.log: 0.0% -- replaced with /var/log/ceph/ceph-client.admin.533352.log.gz
2026-04-01T02:59:24.319 INFO:teuthology.orchestra.run.vm03.stderr: 86.8% -- replaced with /var/log/ceph/ceph-client.admin.323800.log.gz
2026-04-01T02:59:52.767 INFO:teuthology.orchestra.run.vm03.stderr:
2026-04-01T02:59:52.767 INFO:teuthology.orchestra.run.vm03.stderr:gzip: /var/log/ceph/ceph-osd.3.log.gz: No space left on device
2026-04-01T02:59:52.768 INFO:teuthology.orchestra.run.vm03.stderr:
2026-04-01T02:59:52.768 INFO:teuthology.orchestra.run.vm03.stderr:gzip: /var/log/ceph/ceph-osd.1.log.gz: No space left on device
2026-04-01T02:59:52.768 INFO:teuthology.orchestra.run.vm03.stderr:
2026-04-01T02:59:52.768 INFO:teuthology.orchestra.run.vm03.stderr:gzip: /var/log/ceph/ceph-osd.0.log.gz: No space left on device
2026-04-01T02:59:52.770 INFO:teuthology.orchestra.run.vm03.stderr:
2026-04-01T02:59:52.770 INFO:teuthology.orchestra.run.vm03.stderr:gzip: /var/log/ceph/ceph-osd.2.log.gz: No space left on device
2026-04-01T02:59:59.522 INFO:teuthology.orchestra.run.vm03.stderr: 91.2% -- replaced with /var/log/ceph/rgw.ceph.client.0.log.gz
2026-04-01T02:59:59.523 INFO:teuthology.orchestra.run.vm03.stderr:
2026-04-01T02:59:59.523 INFO:teuthology.orchestra.run.vm03.stderr:real 0m37.073s
2026-04-01T02:59:59.523 INFO:teuthology.orchestra.run.vm03.stderr:user 1m55.191s
2026-04-01T02:59:59.523 INFO:teuthology.orchestra.run.vm03.stderr:sys 0m6.703s
2026-04-01T02:59:59.523 DEBUG:teuthology.orchestra.run:got remote process result: 123
2026-04-01T02:59:59.524 ERROR:teuthology.run_tasks:Manager failed: ceph
Traceback (most recent call last):
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2001, in task
    yield
  File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks
    suppress = manager.__exit__(*exc_info)
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__
    next(self.gen)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task
    with contextutil.nested(*subtasks):
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__
    next(self.gen)
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested
    raise exc[1]
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools
    yield
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested
    if exit(*exc):
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__
    next(self.gen)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw
    rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin
    proc = remote.run(
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run
    r = self._runner(client=self.ssh, name=self.shortname, **kwargs)
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run
    r.wait()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait
    self._raise_for_status()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status
    raise CommandFailedError(
teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 1181, in cluster
    yield
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested
    yield vars
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 2011, in task
    ctx.managers[config['cluster']].wait_for_clean()
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2919, in wait_for_clean
    num_active_clean = self.get_num_active_clean()
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2698, in get_num_active_clean
    pgs = self.get_pg_stats()
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 2464, in get_pg_stats
    out = self.raw_cluster_cmd('pg', 'dump', '--format=json')
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1696, in raw_cluster_cmd
    return self.run_cluster_cmd(**kwargs).stdout.getvalue()
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph_manager.py", line 1687, in run_cluster_cmd
    return self.controller.run(**kwargs)
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run
    r = self._runner(client=self.ssh, name=self.shortname, **kwargs)
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run
    r.wait()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait
    self._raise_for_status()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status
    raise CommandFailedError(
teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 124: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg dump --format=json'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks
    suppress = manager.__exit__(*exc_info)
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 1996, in task
    with contextutil.nested(*subtasks):
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested
    raise exc[1]
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested
    if exit(*exc):
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/ceph.py", line 263, in ceph_log
    run.wait(
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait
    proc.wait()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait
    self._raise_for_status()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status
    raise CommandFailedError(
teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 123: "time sudo find /var/log/ceph -name '*.log' -print0 | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- gzip -5 --verbose --"
2026-04-01T02:59:59.524 DEBUG:teuthology.run_tasks:Unwinding manager install
2026-04-01T02:59:59.526 ERROR:teuthology.contextutil:Saw exception from nested tasks
Traceback (most recent call last):
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested
    yield vars
  File "/home/teuthos/kshtsk/teuthology/teuthology/task/install/__init__.py", line 644, in task
    yield
  File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks
    suppress = manager.__exit__(*exc_info)
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__
    next(self.gen)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task
    with contextutil.nested(*subtasks):
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__
    next(self.gen)
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested
    raise exc[1]
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools
    yield
  File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested
    if exit(*exc):
  File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__
    next(self.gen)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw
    rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True)
  File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin
    proc = remote.run(
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run
    r = self._runner(client=self.ssh, name=self.shortname, **kwargs)
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run
    r.wait()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait
    self._raise_for_status()
  File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status
    raise CommandFailedError(
teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all'
2026-04-01T02:59:59.527 INFO:teuthology.task.install.util:Removing shipped files: /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer...
2026-04-01T02:59:59.527 DEBUG:teuthology.orchestra.run.vm03:> sudo rm -f -- /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer
2026-04-01T02:59:59.566 DEBUG:teuthology.orchestra.run.vm06:> sudo rm -f -- /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer
2026-04-01T02:59:59.567 DEBUG:teuthology.orchestra.run.vm08:> sudo rm -f -- /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer
2026-04-01T02:59:59.598 INFO:teuthology.task.install.rpm:Removing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd on rpm system.
2026-04-01T02:59:59.598 DEBUG:teuthology.orchestra.run.vm03:>
2026-04-01T02:59:59.598 DEBUG:teuthology.orchestra.run.vm03:> for d in ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd ; do
2026-04-01T02:59:59.598 DEBUG:teuthology.orchestra.run.vm03:> sudo yum -y remove $d || true
2026-04-01T02:59:59.598 DEBUG:teuthology.orchestra.run.vm03:> done
2026-04-01T02:59:59.603 INFO:teuthology.task.install.rpm:Removing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd on rpm system.
2026-04-01T02:59:59.603 DEBUG:teuthology.orchestra.run.vm06:>
2026-04-01T02:59:59.603 DEBUG:teuthology.orchestra.run.vm06:> for d in ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd ; do
2026-04-01T02:59:59.603 DEBUG:teuthology.orchestra.run.vm06:> sudo yum -y remove $d || true
2026-04-01T02:59:59.603 DEBUG:teuthology.orchestra.run.vm06:> done
2026-04-01T02:59:59.609 INFO:teuthology.task.install.rpm:Removing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd on rpm system.
2026-04-01T02:59:59.610 DEBUG:teuthology.orchestra.run.vm08:>
2026-04-01T02:59:59.610 DEBUG:teuthology.orchestra.run.vm08:> for d in ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd ; do
2026-04-01T02:59:59.610 DEBUG:teuthology.orchestra.run.vm08:> sudo yum -y remove $d || true
2026-04-01T02:59:59.610 DEBUG:teuthology.orchestra.run.vm08:> done
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:Dependencies resolved.
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout: Package Arch Version Repo Size
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:Removing:
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout: ceph-radosgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 103 M
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:Removing unused dependencies:
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout: mailcap noarch 2.1.49-5.el9.0.2 @baseos 78 k
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:Transaction Summary
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:Remove 2 Packages
2026-04-01T02:59:59.808 INFO:teuthology.orchestra.run.vm08.stdout:
2026-04-01T02:59:59.809 INFO:teuthology.orchestra.run.vm08.stdout:Freed space: 103 M
2026-04-01T02:59:59.809 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction check
2026-04-01T02:59:59.811 INFO:teuthology.orchestra.run.vm08.stdout:Transaction check succeeded.
2026-04-01T02:59:59.811 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction test
2026-04-01T02:59:59.822 INFO:teuthology.orchestra.run.vm08.stdout:Transaction test succeeded.
2026-04-01T02:59:59.823 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction 2026-04-01T02:59:59.823 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T02:59:59.853 INFO:teuthology.orchestra.run.vm08.stdout: Preparing : 1/1 2026-04-01T02:59:59.875 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T02:59:59.875 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T02:59:59.875 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 2026-04-01T02:59:59.875 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-radosgw.target". 2026-04-01T02:59:59.875 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-radosgw.target". 2026-04-01T02:59:59.875 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T02:59:59.882 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T02:59:59.891 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T02:59:59.907 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : mailcap-2.1.49-5.el9.0.2.noarch 2/2 2026-04-01T02:59:59.952 INFO:teuthology.orchestra.run.vm03.stdout:Dependencies resolved. 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout: Package Arch Version Repo Size 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:Removing: 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout: ceph-radosgw x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 103 M 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:Removing unused dependencies: 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout: mailcap noarch 2.1.49-5.el9.0.2 @baseos 78 k 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:Transaction Summary 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:Remove 2 Packages 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:Freed space: 103 M 2026-04-01T02:59:59.953 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction check 2026-04-01T02:59:59.954 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T02:59:59.958 INFO:teuthology.orchestra.run.vm03.stdout:Transaction check succeeded. 2026-04-01T02:59:59.958 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction test 2026-04-01T02:59:59.970 INFO:teuthology.orchestra.run.vm03.stdout:Transaction test succeeded. 
2026-04-01T02:59:59.970 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction 2026-04-01T02:59:59.979 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: mailcap-2.1.49-5.el9.0.2.noarch 2/2 2026-04-01T02:59:59.979 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T03:00:00.001 INFO:teuthology.orchestra.run.vm03.stdout: Preparing : 1/1 2026-04-01T03:00:00.024 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T03:00:00.024 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:00.024 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 2026-04-01T03:00:00.024 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-radosgw.target". 2026-04-01T03:00:00.024 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-radosgw.target". 2026-04-01T03:00:00.024 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.028 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T03:00:00.037 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T03:00:00.053 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : mailcap-2.1.49-5.el9.0.2.noarch 2/2 2026-04-01T03:00:00.063 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : mailcap-2.1.49-5.el9.0.2.noarch 2/2 2026-04-01T03:00:00.063 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.063 INFO:teuthology.orchestra.run.vm08.stdout:Removed: 2026-04-01T03:00:00.063 INFO:teuthology.orchestra.run.vm08.stdout: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:00.063 INFO:teuthology.orchestra.run.vm08.stdout: mailcap-2.1.49-5.el9.0.2.noarch 2026-04-01T03:00:00.063 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.063 INFO:teuthology.orchestra.run.vm08.stdout:Complete! 2026-04-01T03:00:00.075 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:00.206 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:00.224 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: mailcap-2.1.49-5.el9.0.2.noarch 2/2 2026-04-01T03:00:00.224 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 1/2 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:Dependencies resolved. 
2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================ 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout: Package Arch Version Repository Size 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================ 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:Removing: 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout: ceph-test x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 365 M 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:Removing unused dependencies: 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout: socat x86_64 1.7.4.1-8.el9 @appstream 1.1 M 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout: xmlstarlet x86_64 1.6.1-20.el9 @appstream 195 k 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:Transaction Summary 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================ 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout:Remove 3 Packages 2026-04-01T03:00:00.278 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.279 INFO:teuthology.orchestra.run.vm08.stdout:Freed space: 366 M 2026-04-01T03:00:00.279 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction check 2026-04-01T03:00:00.281 INFO:teuthology.orchestra.run.vm08.stdout:Transaction check succeeded. 2026-04-01T03:00:00.281 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction test 2026-04-01T03:00:00.300 INFO:teuthology.orchestra.run.vm08.stdout:Transaction test succeeded. 2026-04-01T03:00:00.300 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction 2026-04-01T03:00:00.314 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : mailcap-2.1.49-5.el9.0.2.noarch 2/2 2026-04-01T03:00:00.314 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.314 INFO:teuthology.orchestra.run.vm03.stdout:Removed: 2026-04-01T03:00:00.314 INFO:teuthology.orchestra.run.vm03.stdout: ceph-radosgw-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:00.314 INFO:teuthology.orchestra.run.vm03.stdout: mailcap-2.1.49-5.el9.0.2.noarch 2026-04-01T03:00:00.314 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.314 INFO:teuthology.orchestra.run.vm03.stdout:Complete! 
2026-04-01T03:00:00.339 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:00.368 INFO:teuthology.orchestra.run.vm08.stdout: Preparing : 1/1 2026-04-01T03:00:00.375 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/3 2026-04-01T03:00:00.377 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : socat-1.7.4.1-8.el9.x86_64 2/3 2026-04-01T03:00:00.391 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : xmlstarlet-1.6.1-20.el9.x86_64 3/3 2026-04-01T03:00:00.450 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: xmlstarlet-1.6.1-20.el9.x86_64 3/3 2026-04-01T03:00:00.450 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/3 2026-04-01T03:00:00.450 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 2/3 2026-04-01T03:00:00.465 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:00.494 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 3/3 2026-04-01T03:00:00.494 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.494 INFO:teuthology.orchestra.run.vm08.stdout:Removed: 2026-04-01T03:00:00.494 INFO:teuthology.orchestra.run.vm08.stdout: ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 socat-1.7.4.1-8.el9.x86_64 2026-04-01T03:00:00.494 INFO:teuthology.orchestra.run.vm08.stdout: xmlstarlet-1.6.1-20.el9.x86_64 2026-04-01T03:00:00.494 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.494 INFO:teuthology.orchestra.run.vm08.stdout:Complete! 2026-04-01T03:00:00.514 INFO:teuthology.orchestra.run.vm03.stdout:Dependencies resolved. 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout: Package Arch Version Repository Size 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:Removing: 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout: ceph-test x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 365 M 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:Removing unused dependencies: 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout: socat x86_64 1.7.4.1-8.el9 @appstream 1.1 M 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout: xmlstarlet x86_64 1.6.1-20.el9 @appstream 195 k 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:Transaction Summary 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:Remove 3 Packages 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:Freed space: 366 M 2026-04-01T03:00:00.515 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction check 2026-04-01T03:00:00.518 INFO:teuthology.orchestra.run.vm03.stdout:Transaction check succeeded. 
2026-04-01T03:00:00.518 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction test 2026-04-01T03:00:00.538 INFO:teuthology.orchestra.run.vm03.stdout:Transaction test succeeded. 2026-04-01T03:00:00.538 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction 2026-04-01T03:00:00.590 INFO:teuthology.orchestra.run.vm03.stdout: Preparing : 1/1 2026-04-01T03:00:00.590 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:00.598 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/3 2026-04-01T03:00:00.602 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : socat-1.7.4.1-8.el9.x86_64 2/3 2026-04-01T03:00:00.617 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : xmlstarlet-1.6.1-20.el9.x86_64 3/3 2026-04-01T03:00:00.685 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: xmlstarlet-1.6.1-20.el9.x86_64 3/3 2026-04-01T03:00:00.685 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/3 2026-04-01T03:00:00.685 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 2/3 2026-04-01T03:00:00.687 INFO:teuthology.orchestra.run.vm08.stdout:Dependencies resolved. 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================ 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: Package Arch Version Repository Size 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================ 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:Removing: 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: ceph x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 0 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:Removing unused dependencies: 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mds x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 6.8 M 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mon x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 19 M 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: lua x86_64 5.4.4-4.el9 @appstream 593 k 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: lua-devel x86_64 5.4.4-4.el9 @crb 49 k 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: luarocks noarch 3.9.2-5.el9 @epel 692 k 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: unzip x86_64 6.0-59.el9 @baseos 389 k 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: zip x86_64 3.0-35.el9 @baseos 724 k 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:Transaction Summary 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:================================================================================ 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:Remove 8 Packages 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:Freed space: 28 M 2026-04-01T03:00:00.688 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction check 2026-04-01T03:00:00.691 INFO:teuthology.orchestra.run.vm08.stdout:Transaction check succeeded. 
2026-04-01T03:00:00.691 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction test 2026-04-01T03:00:00.709 INFO:teuthology.orchestra.run.vm08.stdout:Transaction test succeeded. 2026-04-01T03:00:00.709 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction 2026-04-01T03:00:00.725 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:00.733 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 3/3 2026-04-01T03:00:00.733 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.733 INFO:teuthology.orchestra.run.vm03.stdout:Removed: 2026-04-01T03:00:00.733 INFO:teuthology.orchestra.run.vm03.stdout: ceph-test-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 socat-1.7.4.1-8.el9.x86_64 2026-04-01T03:00:00.733 INFO:teuthology.orchestra.run.vm03.stdout: xmlstarlet-1.6.1-20.el9.x86_64 2026-04-01T03:00:00.733 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.733 INFO:teuthology.orchestra.run.vm03.stdout:Complete! 2026-04-01T03:00:00.749 INFO:teuthology.orchestra.run.vm08.stdout: Preparing : 1/1 2026-04-01T03:00:00.754 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/8 2026-04-01T03:00:00.757 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : luarocks-3.9.2-5.el9.noarch 2/8 2026-04-01T03:00:00.759 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : lua-devel-5.4.4-4.el9.x86_64 3/8 2026-04-01T03:00:00.762 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : zip-3.0-35.el9.x86_64 4/8 2026-04-01T03:00:00.764 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : unzip-6.0-59.el9.x86_64 5/8 2026-04-01T03:00:00.766 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : lua-5.4.4-4.el9.x86_64 6/8 2026-04-01T03:00:00.785 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 7/8 2026-04-01T03:00:00.785 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:00.785 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service". 2026-04-01T03:00:00.785 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mds.target". 2026-04-01T03:00:00.785 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mds.target". 2026-04-01T03:00:00.785 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.786 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 7/8 2026-04-01T03:00:00.794 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 7/8 2026-04-01T03:00:00.837 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 8/8 2026-04-01T03:00:00.837 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:00.837 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service". 2026-04-01T03:00:00.837 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mon.target". 2026-04-01T03:00:00.837 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mon.target". 
2026-04-01T03:00:00.837 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.839 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 8/8 2026-04-01T03:00:00.847 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 8/8 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/8 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2/8 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 3/8 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : lua-5.4.4-4.el9.x86_64 4/8 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 5/8 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 6/8 2026-04-01T03:00:00.924 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : unzip-6.0-59.el9.x86_64 7/8 2026-04-01T03:00:00.932 INFO:teuthology.orchestra.run.vm03.stdout:Dependencies resolved. 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: Package Arch Version Repository Size 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:Removing: 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: ceph x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 0 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:Removing unused dependencies: 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mds x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 6.8 M 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mon x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 19 M 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: lua x86_64 5.4.4-4.el9 @appstream 593 k 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: lua-devel x86_64 5.4.4-4.el9 @crb 49 k 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: luarocks noarch 3.9.2-5.el9 @epel 692 k 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: unzip x86_64 6.0-59.el9 @baseos 389 k 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: zip x86_64 3.0-35.el9 @baseos 724 k 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:Transaction Summary 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:================================================================================ 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:Remove 8 Packages 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:Freed space: 28 M 2026-04-01T03:00:00.933 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction check 2026-04-01T03:00:00.936 
INFO:teuthology.orchestra.run.vm03.stdout:Transaction check succeeded. 2026-04-01T03:00:00.936 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction test 2026-04-01T03:00:00.960 INFO:teuthology.orchestra.run.vm03.stdout:Transaction test succeeded. 2026-04-01T03:00:00.961 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction 2026-04-01T03:00:00.972 INFO:teuthology.orchestra.run.vm08.stdout: Verifying : zip-3.0-35.el9.x86_64 8/8 2026-04-01T03:00:00.972 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.972 INFO:teuthology.orchestra.run.vm08.stdout:Removed: 2026-04-01T03:00:00.972 INFO:teuthology.orchestra.run.vm08.stdout: ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: lua-5.4.4-4.el9.x86_64 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: lua-devel-5.4.4-4.el9.x86_64 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: luarocks-3.9.2-5.el9.noarch 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: unzip-6.0-59.el9.x86_64 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: zip-3.0-35.el9.x86_64 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:00.973 INFO:teuthology.orchestra.run.vm08.stdout:Complete! 2026-04-01T03:00:00.976 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.000 INFO:teuthology.orchestra.run.vm03.stdout: Preparing : 1/1 2026-04-01T03:00:01.006 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/8 2026-04-01T03:00:01.011 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : luarocks-3.9.2-5.el9.noarch 2/8 2026-04-01T03:00:01.012 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : lua-devel-5.4.4-4.el9.x86_64 3/8 2026-04-01T03:00:01.015 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : zip-3.0-35.el9.x86_64 4/8 2026-04-01T03:00:01.018 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : unzip-6.0-59.el9.x86_64 5/8 2026-04-01T03:00:01.021 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : lua-5.4.4-4.el9.x86_64 6/8 2026-04-01T03:00:01.041 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 7/8 2026-04-01T03:00:01.041 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:01.041 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service". 2026-04-01T03:00:01.041 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mds.target". 2026-04-01T03:00:01.042 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mds.target". 
2026-04-01T03:00:01.042 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.042 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 7/8 2026-04-01T03:00:01.049 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 7/8 2026-04-01T03:00:01.069 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 8/8 2026-04-01T03:00:01.069 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:01.069 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service". 2026-04-01T03:00:01.069 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mon.target". 2026-04-01T03:00:01.069 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mon.target". 2026-04-01T03:00:01.069 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.070 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 8/8 2026-04-01T03:00:01.114 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.169 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 8/8 2026-04-01T03:00:01.170 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 1/8 2026-04-01T03:00:01.170 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2/8 2026-04-01T03:00:01.170 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 3/8 2026-04-01T03:00:01.170 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : lua-5.4.4-4.el9.x86_64 4/8 2026-04-01T03:00:01.170 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 5/8 2026-04-01T03:00:01.170 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 6/8 2026-04-01T03:00:01.170 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : unzip-6.0-59.el9.x86_64 7/8 2026-04-01T03:00:01.179 INFO:teuthology.orchestra.run.vm08.stdout:Dependencies resolved. 
2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout:=================================================================================================================== 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: Package Arch Version Repository Size 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout:=================================================================================================================== 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout:Removing: 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-base x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 24 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout:Removing dependent packages: 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 447 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 2.9 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 938 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-dashboard noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 148 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 66 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-rook noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 567 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-osd x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 54 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-volume noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 1.4 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: rbd-mirror x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 11 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout:Removing unused dependencies: 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: abseil-cpp x86_64 20211102.0-4.el9 @epel 1.9 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: c-ares x86_64 1.19.1-2.el9_4 @baseos 279 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 98 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-grafana-dashboards noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 990 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 60 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-mgr-modules-core noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 1.6 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-prometheus-alerts noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 57 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: ceph-selinux x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 138 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: cryptsetup x86_64 2.7.2-4.el9 @baseos 722 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas x86_64 3.0.4-8.el9.0.1 @appstream 68 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas-netlib x86_64 3.0.4-8.el9.0.1 @appstream 11 
M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: flexiblas-openblas-openmp x86_64 3.0.4-8.el9.0.1 @appstream 39 k 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: gperftools-libs x86_64 2.9.1-3.el9 @epel 1.4 M 2026-04-01T03:00:01.186 INFO:teuthology.orchestra.run.vm08.stdout: grpc-data noarch 1.46.7-10.el9 @epel 13 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: ledmon-libs x86_64 1.1.0-3.el9 @baseos 80 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libcephsqlite x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 409 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libconfig x86_64 1.7.2-9.el9 @baseos 220 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libgfortran x86_64 11.5.0-11.el9 @baseos 2.8 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: liboath x86_64 2.6.12-1.el9 @epel 94 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libquadmath x86_64 11.5.0-11.el9 @baseos 330 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libradosstriper1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 792 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libstoragemgmt x86_64 1.10.1-1.el9 @appstream 685 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libunwind x86_64 1.6.2-1.el9 @epel 170 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: libxslt x86_64 1.1.34-13.el9_6 @appstream 751 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: nvme-cli x86_64 2.13-1.el9 @baseos 6.8 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: openblas x86_64 0.3.29-1.el9 @appstream 112 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: openblas-openmp x86_64 0.3.29-1.el9 @appstream 46 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: pciutils x86_64 3.7.0-7.el9 @baseos 216 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: protobuf x86_64 3.14.0-17.el9_7 @appstream 3.5 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: protobuf-compiler x86_64 3.14.0-17.el9_7 @crb 2.9 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-asyncssh noarch 2.13.2-5.el9 @epel 3.9 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-autocommand noarch 2.2.2-8.el9 @epel 82 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-babel noarch 2.9.1-2.el9 @appstream 27 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 @epel 254 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-bcrypt x86_64 3.2.2-1.el9 @epel 87 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-cachetools noarch 4.2.4-1.el9 @epel 93 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 816 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-certifi noarch 2023.05.07-4.el9 @epel 6.3 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-cffi x86_64 1.14.5-5.el9 @baseos 1.0 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-chardet noarch 4.0.0-5.el9 @77d52b2cce1347aa9f3fc60d8b93d222 1.4 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-cheroot noarch 10.0.1-5.el9 @epel 682 k 
2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-cherrypy noarch 18.10.0-5.el9 @epel 1.0 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-cryptography x86_64 36.0.1-5.el9_6 @baseos 4.5 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-devel x86_64 3.9.23-2.el9 @appstream 765 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-google-auth noarch 1:2.45.0-1.el9 @epel 1.4 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-grpcio x86_64 1.46.7-10.el9 @epel 6.7 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 @epel 418 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-idna noarch 2.10-7.el9_4.1 @77d52b2cce1347aa9f3fc60d8b93d222 513 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-influxdb noarch 5.3.1-1.el9 @epel 747 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-isodate noarch 0.6.1-3.el9 @epel 203 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco noarch 8.2.1-3.el9 @epel 3.7 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 @epel 24 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 @epel 55 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-context noarch 6.0.1-3.el9 @epel 31 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 @epel 33 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jaraco-text noarch 4.0.0-2.el9 @epel 51 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jinja2 noarch 2.11.3-8.el9_5 @appstream 1.1 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jsonpatch noarch 1.21-16.el9 @0d57cd3fe20446e8b1c08da162742194 55 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-jsonpointer noarch 2.0-4.el9.0.1 @0d57cd3fe20446e8b1c08da162742194 34 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 @epel 21 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 @appstream 832 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-lxml x86_64 4.6.5-3.el9 @appstream 4.2 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-markupsafe x86_64 1.1.1-12.el9 @appstream 60 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-more-itertools noarch 8.12.0-2.el9 @epel 378 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-msgpack x86_64 1.0.3-2.el9 @epel 264 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-natsort noarch 7.1.1-5.el9 @epel 215 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-numpy x86_64 1:1.23.5-2.el9_7 @appstream 30 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9_7 @appstream 1.7 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-oauthlib noarch 3.1.1-5.el9 @0d57cd3fe20446e8b1c08da162742194 888 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-packaging noarch 20.9-5.el9 @appstream 
248 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-ply noarch 3.11-14.el9.0.1 @baseos 430 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-portend noarch 3.1.0-2.el9 @epel 20 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-prettytable noarch 0.7.2-27.el9.0.1 @0d57cd3fe20446e8b1c08da162742194 166 k 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-protobuf noarch 3.14.0-17.el9_7 @appstream 1.4 M 2026-04-01T03:00:01.187 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 @epel 389 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyasn1 noarch 0.4.8-7.el9_7 @appstream 622 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9_7 @appstream 1.0 M 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-pycparser noarch 2.20-6.el9 @baseos 745 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-pyparsing noarch 2.4.7-9.el9.0.1 @baseos 635 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-pysocks noarch 1.7.1-12.el9.0.1 @77d52b2cce1347aa9f3fc60d8b93d222 88 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-pytz noarch 2021.1-5.el9 @0d57cd3fe20446e8b1c08da162742194 176 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-repoze-lru noarch 0.7-16.el9 @epel 83 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-requests noarch 2.25.1-10.el9_6 @baseos 405 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 @appstream 119 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-routes noarch 2.5.1-5.el9 @epel 459 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-rsa noarch 4.9-2.el9 @epel 202 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-saml noarch 1.16.0-1.el9 @epel 730 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-scipy x86_64 1.9.3-2.el9 @appstream 72 M 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-tempora noarch 5.0.0-2.el9 @epel 96 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-toml noarch 0.10.2-6.el9.0.1 @appstream 99 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-typing-extensions noarch 4.15.0-1.el9 @epel 447 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-urllib3 noarch 1.26.5-6.el9_7.1 @baseos 746 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-websocket-client noarch 1.2.3-2.el9 @epel 319 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-xmlsec x86_64 1.3.13-1.el9 @epel 158 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: python3-zc-lockfile noarch 2.0-10.el9 @epel 35 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: qatlib x86_64 24.09.0-1.el9 @appstream 588 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: qatlib-service x86_64 24.09.0-1.el9 @appstream 64 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: qatzip-libs x86_64 1.3.1-1.el9 @appstream 148 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: smartmontools x86_64 1:7.2-9.el9 @baseos 1.9 M 2026-04-01T03:00:01.188 
INFO:teuthology.orchestra.run.vm08.stdout: xmlsec1 x86_64 1.2.29-13.el9 @appstream 596 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 @appstream 281 k 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout:Transaction Summary 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout:=================================================================================================================== 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout:Remove 111 Packages 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout:Freed space: 687 M 2026-04-01T03:00:01.188 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction check 2026-04-01T03:00:01.213 INFO:teuthology.orchestra.run.vm08.stdout:Transaction check succeeded. 2026-04-01T03:00:01.213 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction test 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: Verifying : zip-3.0-35.el9.x86_64 8/8 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout:Removed: 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: ceph-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mds-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mon-2:20.2.0-8.g0597158282e.el9.clyso.x86_64 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: lua-5.4.4-4.el9.x86_64 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: lua-devel-5.4.4-4.el9.x86_64 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: luarocks-3.9.2-5.el9.noarch 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: unzip-6.0-59.el9.x86_64 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: zip-3.0-35.el9.x86_64 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.215 INFO:teuthology.orchestra.run.vm03.stdout:Complete! 2026-04-01T03:00:01.242 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.322 INFO:teuthology.orchestra.run.vm08.stdout:Transaction test succeeded. 2026-04-01T03:00:01.322 INFO:teuthology.orchestra.run.vm08.stdout:Running transaction 2026-04-01T03:00:01.363 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.404 INFO:teuthology.orchestra.run.vm03.stdout:Dependencies resolved. 
2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout:=================================================================================================================== 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: Package Arch Version Repository Size 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout:=================================================================================================================== 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout:Removing: 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-base x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 24 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout:Removing dependent packages: 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 447 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 2.9 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-cephadm noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 938 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-dashboard noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 148 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 66 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-rook noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 567 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-osd x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 54 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-volume noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 1.4 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: rbd-mirror x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 11 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout:Removing unused dependencies: 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: abseil-cpp x86_64 20211102.0-4.el9 @epel 1.9 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: c-ares x86_64 1.19.1-2.el9_4 @baseos 279 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 98 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-grafana-dashboards noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 990 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 60 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-mgr-modules-core noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 1.6 M 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-prometheus-alerts noarch 2:20.2.0-8.g0597158282e.el9.clyso @ceph-noarch 57 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: ceph-selinux x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 138 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: cryptsetup x86_64 2.7.2-4.el9 @baseos 722 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas x86_64 3.0.4-8.el9.0.1 @appstream 68 k 2026-04-01T03:00:01.410 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas-netlib x86_64 3.0.4-8.el9.0.1 @appstream 11 
M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: flexiblas-openblas-openmp x86_64 3.0.4-8.el9.0.1 @appstream 39 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: gperftools-libs x86_64 2.9.1-3.el9 @epel 1.4 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: grpc-data noarch 1.46.7-10.el9 @epel 13 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: ledmon-libs x86_64 1.1.0-3.el9 @baseos 80 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libcephsqlite x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 409 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libconfig x86_64 1.7.2-9.el9 @baseos 220 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libgfortran x86_64 11.5.0-11.el9 @baseos 2.8 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: liboath x86_64 2.6.12-1.el9 @epel 94 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libquadmath x86_64 11.5.0-11.el9 @baseos 330 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libradosstriper1 x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 792 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libstoragemgmt x86_64 1.10.1-1.el9 @appstream 685 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libunwind x86_64 1.6.2-1.el9 @epel 170 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: libxslt x86_64 1.1.34-13.el9_6 @appstream 751 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: nvme-cli x86_64 2.13-1.el9 @baseos 6.8 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: openblas x86_64 0.3.29-1.el9 @appstream 112 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: openblas-openmp x86_64 0.3.29-1.el9 @appstream 46 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: pciutils x86_64 3.7.0-7.el9 @baseos 216 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: protobuf x86_64 3.14.0-17.el9_7 @appstream 3.5 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: protobuf-compiler x86_64 3.14.0-17.el9_7 @crb 2.9 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-asyncssh noarch 2.13.2-5.el9 @epel 3.9 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-autocommand noarch 2.2.2-8.el9 @epel 82 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-babel noarch 2.9.1-2.el9 @appstream 27 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 @epel 254 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-bcrypt x86_64 3.2.2-1.el9 @epel 87 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-cachetools noarch 4.2.4-1.el9 @epel 93 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-ceph-common x86_64 2:20.2.0-8.g0597158282e.el9.clyso @ceph 816 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-certifi noarch 2023.05.07-4.el9 @epel 6.3 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-cffi x86_64 1.14.5-5.el9 @baseos 1.0 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-chardet noarch 4.0.0-5.el9 @77d52b2cce1347aa9f3fc60d8b93d222 1.4 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-cheroot noarch 10.0.1-5.el9 @epel 682 k 
2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-cherrypy noarch 18.10.0-5.el9 @epel 1.0 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-cryptography x86_64 36.0.1-5.el9_6 @baseos 4.5 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-devel x86_64 3.9.23-2.el9 @appstream 765 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-google-auth noarch 1:2.45.0-1.el9 @epel 1.4 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-grpcio x86_64 1.46.7-10.el9 @epel 6.7 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 @epel 418 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-idna noarch 2.10-7.el9_4.1 @77d52b2cce1347aa9f3fc60d8b93d222 513 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-influxdb noarch 5.3.1-1.el9 @epel 747 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-isodate noarch 0.6.1-3.el9 @epel 203 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco noarch 8.2.1-3.el9 @epel 3.7 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 @epel 24 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 @epel 55 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-context noarch 6.0.1-3.el9 @epel 31 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 @epel 33 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jaraco-text noarch 4.0.0-2.el9 @epel 51 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jinja2 noarch 2.11.3-8.el9_5 @appstream 1.1 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jsonpatch noarch 1.21-16.el9 @0d57cd3fe20446e8b1c08da162742194 55 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-jsonpointer noarch 2.0-4.el9.0.1 @0d57cd3fe20446e8b1c08da162742194 34 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 @epel 21 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 @appstream 832 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-lxml x86_64 4.6.5-3.el9 @appstream 4.2 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-markupsafe x86_64 1.1.1-12.el9 @appstream 60 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-more-itertools noarch 8.12.0-2.el9 @epel 378 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-msgpack x86_64 1.0.3-2.el9 @epel 264 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-natsort noarch 7.1.1-5.el9 @epel 215 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-numpy x86_64 1:1.23.5-2.el9_7 @appstream 30 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9_7 @appstream 1.7 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-oauthlib noarch 3.1.1-5.el9 @0d57cd3fe20446e8b1c08da162742194 888 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-packaging noarch 20.9-5.el9 @appstream 
248 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-ply noarch 3.11-14.el9.0.1 @baseos 430 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-portend noarch 3.1.0-2.el9 @epel 20 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-prettytable noarch 0.7.2-27.el9.0.1 @0d57cd3fe20446e8b1c08da162742194 166 k 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-protobuf noarch 3.14.0-17.el9_7 @appstream 1.4 M 2026-04-01T03:00:01.411 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 @epel 389 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyasn1 noarch 0.4.8-7.el9_7 @appstream 622 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9_7 @appstream 1.0 M 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-pycparser noarch 2.20-6.el9 @baseos 745 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-pyparsing noarch 2.4.7-9.el9.0.1 @baseos 635 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-pysocks noarch 1.7.1-12.el9.0.1 @77d52b2cce1347aa9f3fc60d8b93d222 88 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-pytz noarch 2021.1-5.el9 @0d57cd3fe20446e8b1c08da162742194 176 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-repoze-lru noarch 0.7-16.el9 @epel 83 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-requests noarch 2.25.1-10.el9_6 @baseos 405 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 @appstream 119 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-routes noarch 2.5.1-5.el9 @epel 459 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-rsa noarch 4.9-2.el9 @epel 202 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-saml noarch 1.16.0-1.el9 @epel 730 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-scipy x86_64 1.9.3-2.el9 @appstream 72 M 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-tempora noarch 5.0.0-2.el9 @epel 96 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-toml noarch 0.10.2-6.el9.0.1 @appstream 99 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-typing-extensions noarch 4.15.0-1.el9 @epel 447 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-urllib3 noarch 1.26.5-6.el9_7.1 @baseos 746 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-websocket-client noarch 1.2.3-2.el9 @epel 319 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-xmlsec x86_64 1.3.13-1.el9 @epel 158 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: python3-zc-lockfile noarch 2.0-10.el9 @epel 35 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: qatlib x86_64 24.09.0-1.el9 @appstream 588 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: qatlib-service x86_64 24.09.0-1.el9 @appstream 64 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: qatzip-libs x86_64 1.3.1-1.el9 @appstream 148 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: smartmontools x86_64 1:7.2-9.el9 @baseos 1.9 M 2026-04-01T03:00:01.412 
INFO:teuthology.orchestra.run.vm03.stdout: xmlsec1 x86_64 1.2.29-13.el9 @appstream 596 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 @appstream 281 k 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout:Transaction Summary 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout:=================================================================================================================== 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout:Remove 111 Packages 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout:Freed space: 687 M 2026-04-01T03:00:01.412 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction check 2026-04-01T03:00:01.435 INFO:teuthology.orchestra.run.vm03.stdout:Transaction check succeeded. 2026-04-01T03:00:01.435 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction test 2026-04-01T03:00:01.469 INFO:teuthology.orchestra.run.vm08.stdout: Preparing : 1/1 2026-04-01T03:00:01.469 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 1/111 2026-04-01T03:00:01.476 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 1/111 2026-04-01T03:00:01.483 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.494 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 2/111 2026-04-01T03:00:01.494 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:01.494 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service". 2026-04-01T03:00:01.494 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mgr.target". 2026-04-01T03:00:01.494 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mgr.target". 2026-04-01T03:00:01.494 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:01.495 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 2/111 2026-04-01T03:00:01.507 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 2/111 2026-04-01T03:00:01.552 INFO:teuthology.orchestra.run.vm03.stdout:Transaction test succeeded. 
2026-04-01T03:00:01.552 INFO:teuthology.orchestra.run.vm03.stdout:Running transaction 2026-04-01T03:00:01.598 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 3/111 2026-04-01T03:00:01.598 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 4/111 2026-04-01T03:00:01.602 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.618 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 4/111 2026-04-01T03:00:01.623 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-influxdb-5.3.1-1.el9.noarch 5/111 2026-04-01T03:00:01.624 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 6/111 2026-04-01T03:00:01.635 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 6/111 2026-04-01T03:00:01.641 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-cherrypy-18.10.0-5.el9.noarch 7/111 2026-04-01T03:00:01.645 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-cheroot-10.0.1-5.el9.noarch 8/111 2026-04-01T03:00:01.653 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-grpcio-tools-1.46.7-10.el9.x86_64 9/111 2026-04-01T03:00:01.657 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-grpcio-1.46.7-10.el9.x86_64 10/111 2026-04-01T03:00:01.676 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 11/111 2026-04-01T03:00:01.676 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:01.676 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service". 2026-04-01T03:00:01.676 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-osd.target". 2026-04-01T03:00:01.676 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-osd.target". 2026-04-01T03:00:01.676 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:01.681 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 11/111 2026-04-01T03:00:01.690 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 11/111 2026-04-01T03:00:01.703 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 12/111 2026-04-01T03:00:01.703 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:01.703 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service". 2026-04-01T03:00:01.703 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:01.711 INFO:teuthology.orchestra.run.vm03.stdout: Preparing : 1/1 2026-04-01T03:00:01.712 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 1/111 2026-04-01T03:00:01.712 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 12/111 2026-04-01T03:00:01.719 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-8.g0597158282e.el9.clyso. 
1/111 2026-04-01T03:00:01.720 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.721 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 12/111 2026-04-01T03:00:01.723 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jaraco-collections-3.0.0-8.el9.noarch 13/111 2026-04-01T03:00:01.729 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jaraco-text-4.0.0-2.el9.noarch 14/111 2026-04-01T03:00:01.734 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jinja2-2.11.3-8.el9_5.noarch 15/111 2026-04-01T03:00:01.737 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 2/111 2026-04-01T03:00:01.737 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:01.737 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service". 2026-04-01T03:00:01.737 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mgr.target". 2026-04-01T03:00:01.737 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mgr.target". 2026-04-01T03:00:01.737 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.738 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 2/111 2026-04-01T03:00:01.750 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 2/111 2026-04-01T03:00:01.763 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-saml-1.16.0-1.el9.noarch 16/111 2026-04-01T03:00:01.770 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-babel-2.9.1-2.el9.noarch 17/111 2026-04-01T03:00:01.773 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jaraco-classes-3.2.1-5.el9.noarch 18/111 2026-04-01T03:00:01.782 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-pyOpenSSL-21.0.0-1.el9.noarch 19/111 2026-04-01T03:00:01.792 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-asyncssh-2.13.2-5.el9.noarch 20/111 2026-04-01T03:00:01.792 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 21/111 2026-04-01T03:00:01.798 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 21/111 2026-04-01T03:00:01.839 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.843 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mgr-modules-core-2:20.2.0-8.g0597158282e.el 3/111 2026-04-01T03:00:01.843 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 4/111 2026-04-01T03:00:01.864 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-8.g0597158282e.el9.c 4/111 2026-04-01T03:00:01.869 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-influxdb-5.3.1-1.el9.noarch 5/111 2026-04-01T03:00:01.869 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 6/111 2026-04-01T03:00:01.887 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jsonpatch-1.21-16.el9.noarch 22/111 2026-04-01T03:00:01.907 INFO:teuthology.orchestra.run.vm03.stdout: Running 
scriptlet: ceph-mgr-cephadm-2:20.2.0-8.g0597158282e.el9.cly 6/111 2026-04-01T03:00:01.935 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-scipy-1.9.3-2.el9.x86_64 23/111 2026-04-01T03:00:01.943 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-cherrypy-18.10.0-5.el9.noarch 7/111 2026-04-01T03:00:01.946 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-xmlsec-1.3.13-1.el9.x86_64 24/111 2026-04-01T03:00:01.947 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-cheroot-10.0.1-5.el9.noarch 8/111 2026-04-01T03:00:01.949 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-lxml-4.6.5-3.el9.x86_64 25/111 2026-04-01T03:00:01.956 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-grpcio-tools-1.46.7-10.el9.x86_64 9/111 2026-04-01T03:00:01.958 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:01.960 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-grpcio-1.46.7-10.el9.x86_64 10/111 2026-04-01T03:00:01.961 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/111 2026-04-01T03:00:01.961 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/libstoragemgmt.service". 2026-04-01T03:00:01.961 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:01.962 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : libstoragemgmt-1.10.1-1.el9.x86_64 26/111 2026-04-01T03:00:01.980 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 11/111 2026-04-01T03:00:01.980 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:01.980 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service". 2026-04-01T03:00:01.980 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-osd.target". 2026-04-01T03:00:01.980 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-osd.target". 2026-04-01T03:00:01.980 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:01.983 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 11/111 2026-04-01T03:00:01.988 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/111 2026-04-01T03:00:01.992 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-osd-2:20.2.0-8.g0597158282e.el9.clyso.x86_6 11/111 2026-04-01T03:00:01.993 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 27/111 2026-04-01T03:00:01.995 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : xmlsec1-openssl-1.2.29-13.el9.x86_64 28/111 2026-04-01T03:00:02.007 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : xmlsec1-1.2.29-13.el9.x86_64 29/111 2026-04-01T03:00:02.008 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 12/111 2026-04-01T03:00:02.008 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:02.008 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service". 
2026-04-01T03:00:02.008 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:02.012 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-cryptography-36.0.1-5.el9_6.x86_64 30/111 2026-04-01T03:00:02.015 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : protobuf-compiler-3.14.0-17.el9_7.x86_64 31/111 2026-04-01T03:00:02.017 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 12/111 2026-04-01T03:00:02.018 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-bcrypt-3.2.2-1.el9.x86_64 32/111 2026-04-01T03:00:02.026 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-volume-2:20.2.0-8.g0597158282e.el9.clyso.no 12/111 2026-04-01T03:00:02.029 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jaraco-collections-3.0.0-8.el9.noarch 13/111 2026-04-01T03:00:02.034 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jaraco-text-4.0.0-2.el9.noarch 14/111 2026-04-01T03:00:02.036 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 33/111 2026-04-01T03:00:02.036 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:02.036 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service". 2026-04-01T03:00:02.036 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target". 2026-04-01T03:00:02.036 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target". 2026-04-01T03:00:02.036 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:02.037 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 33/111 2026-04-01T03:00:02.038 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jinja2-2.11.3-8.el9_5.noarch 15/111 2026-04-01T03:00:02.045 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 33/111 2026-04-01T03:00:02.048 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jaraco-context-6.0.1-3.el9.noarch 34/111 2026-04-01T03:00:02.051 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-packaging-20.9-5.el9.noarch 35/111 2026-04-01T03:00:02.054 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-portend-3.1.0-2.el9.noarch 36/111 2026-04-01T03:00:02.057 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-tempora-5.0.0-2.el9.noarch 37/111 2026-04-01T03:00:02.060 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jaraco-functools-3.5.0-2.el9.noarch 38/111 2026-04-01T03:00:02.063 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-routes-2.5.1-5.el9.noarch 39/111 2026-04-01T03:00:02.063 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 40/111 2026-04-01T03:00:02.070 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-saml-1.16.0-1.el9.noarch 16/111 2026-04-01T03:00:02.077 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:02.077 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-babel-2.9.1-2.el9.noarch 17/111 2026-04-01T03:00:02.081 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jaraco-classes-3.2.1-5.el9.noarch 18/111 2026-04-01T03:00:02.090 
INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-pyOpenSSL-21.0.0-1.el9.noarch 19/111 2026-04-01T03:00:02.098 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-asyncssh-2.13.2-5.el9.noarch 20/111 2026-04-01T03:00:02.098 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 21/111 2026-04-01T03:00:02.105 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-8.g059715 21/111 2026-04-01T03:00:02.113 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 40/111 2026-04-01T03:00:02.124 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-kubernetes-1:26.1.0-3.el9.noarch 41/111 2026-04-01T03:00:02.128 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-requests-oauthlib-1.3.0-12.el9.noarch 42/111 2026-04-01T03:00:02.137 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-requests-2.25.1-10.el9_6.noarch 43/111 2026-04-01T03:00:02.142 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-google-auth-1:2.45.0-1.el9.noarch 44/111 2026-04-01T03:00:02.151 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-rsa-4.9-2.el9.noarch 45/111 2026-04-01T03:00:02.157 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 46/111 2026-04-01T03:00:02.162 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-urllib3-1.26.5-6.el9_7.1.noarch 47/111 2026-04-01T03:00:02.166 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-cffi-1.14.5-5.el9.x86_64 48/111 2026-04-01T03:00:02.195 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:02.199 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jsonpatch-1.21-16.el9.noarch 22/111 2026-04-01T03:00:02.210 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-pycparser-2.20-6.el9.noarch 49/111 2026-04-01T03:00:02.218 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-scipy-1.9.3-2.el9.x86_64 23/111 2026-04-01T03:00:02.222 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-numpy-1:1.23.5-2.el9_7.x86_64 50/111 2026-04-01T03:00:02.225 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-xmlsec-1.3.13-1.el9.x86_64 24/111 2026-04-01T03:00:02.225 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 51/111 2026-04-01T03:00:02.229 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-lxml-4.6.5-3.el9.x86_64 25/111 2026-04-01T03:00:02.230 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 52/111 2026-04-01T03:00:02.232 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : openblas-openmp-0.3.29-1.el9.x86_64 53/111 2026-04-01T03:00:02.236 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : libgfortran-11.5.0-11.el9.x86_64 54/111 2026-04-01T03:00:02.239 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 55/111 2026-04-01T03:00:02.244 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/111 2026-04-01T03:00:02.244 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/libstoragemgmt.service". 
2026-04-01T03:00:02.244 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:02.245 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : libstoragemgmt-1.10.1-1.el9.x86_64 26/111 2026-04-01T03:00:02.259 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-8.g05971582 56/111 2026-04-01T03:00:02.259 INFO:teuthology.orchestra.run.vm08.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:02.259 INFO:teuthology.orchestra.run.vm08.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-04-01T03:00:02.259 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:02.259 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-immutable-object-cache-2:20.2.0-8.g05971582 56/111 2026-04-01T03:00:02.267 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-8.g05971582 56/111 2026-04-01T03:00:02.269 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : openblas-0.3.29-1.el9.x86_64 57/111 2026-04-01T03:00:02.271 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : flexiblas-3.0.4-8.el9.0.1.x86_64 58/111 2026-04-01T03:00:02.275 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-ply-3.11-14.el9.0.1.noarch 59/111 2026-04-01T03:00:02.277 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/111 2026-04-01T03:00:02.278 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-idna-2.10-7.el9_4.1.noarch 60/111 2026-04-01T03:00:02.282 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-numpy-f2py-1:1.23.5-2.el9_7.x86_64 27/111 2026-04-01T03:00:02.283 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-pysocks-1.7.1-12.el9.0.1.noarch 61/111 2026-04-01T03:00:02.284 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : xmlsec1-openssl-1.2.29-13.el9.x86_64 28/111 2026-04-01T03:00:02.287 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-pyasn1-0.4.8-7.el9_7.noarch 62/111 2026-04-01T03:00:02.293 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-cachetools-4.2.4-1.el9.noarch 63/111 2026-04-01T03:00:02.298 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : xmlsec1-1.2.29-13.el9.x86_64 29/111 2026-04-01T03:00:02.301 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-chardet-4.0.0-5.el9.noarch 64/111 2026-04-01T03:00:02.303 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-cryptography-36.0.1-5.el9_6.x86_64 30/111 2026-04-01T03:00:02.306 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : protobuf-compiler-3.14.0-17.el9_7.x86_64 31/111 2026-04-01T03:00:02.307 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-oauthlib-3.1.1-5.el9.noarch 65/111 2026-04-01T03:00:02.309 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-bcrypt-3.2.2-1.el9.x86_64 32/111 2026-04-01T03:00:02.310 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-websocket-client-1.2.3-2.el9.noarch 66/111 2026-04-01T03:00:02.313 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-certifi-2023.05.07-4.el9.noarch 67/111 2026-04-01T03:00:02.315 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-repoze-lru-0.7-16.el9.noarch 68/111 2026-04-01T03:00:02.318 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jaraco-8.2.1-3.el9.noarch 69/111 2026-04-01T03:00:02.318 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 
2026-04-01T03:00:02.321 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-more-itertools-8.12.0-2.el9.noarch 70/111 2026-04-01T03:00:02.324 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-toml-0.10.2-6.el9.0.1.noarch 71/111 2026-04-01T03:00:02.326 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-pytz-2021.1-5.el9.noarch 72/111 2026-04-01T03:00:02.329 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-pyparsing-2.4.7-9.el9.0.1.noarch 73/111 2026-04-01T03:00:02.330 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 33/111 2026-04-01T03:00:02.330 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:02.330 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service". 2026-04-01T03:00:02.330 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target". 2026-04-01T03:00:02.330 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target". 2026-04-01T03:00:02.330 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:02.331 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 33/111 2026-04-01T03:00:02.337 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-backports-tarfile-1.2.0-1.el9.noarch 74/111 2026-04-01T03:00:02.339 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: rbd-mirror-2:20.2.0-8.g0597158282e.el9.clyso.x86 33/111 2026-04-01T03:00:02.341 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-devel-3.9.23-2.el9.x86_64 75/111 2026-04-01T03:00:02.342 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jaraco-context-6.0.1-3.el9.noarch 34/111 2026-04-01T03:00:02.343 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-jsonpointer-2.0-4.el9.0.1.noarch 76/111 2026-04-01T03:00:02.345 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-packaging-20.9-5.el9.noarch 35/111 2026-04-01T03:00:02.347 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-typing-extensions-4.15.0-1.el9.noarch 77/111 2026-04-01T03:00:02.348 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-portend-3.1.0-2.el9.noarch 36/111 2026-04-01T03:00:02.350 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-isodate-0.6.1-3.el9.noarch 78/111 2026-04-01T03:00:02.351 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-tempora-5.0.0-2.el9.noarch 37/111 2026-04-01T03:00:02.352 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-autocommand-2.2.2-8.el9.noarch 79/111 2026-04-01T03:00:02.354 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jaraco-functools-3.5.0-2.el9.noarch 38/111 2026-04-01T03:00:02.357 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-routes-2.5.1-5.el9.noarch 39/111 2026-04-01T03:00:02.357 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 40/111 2026-04-01T03:00:02.358 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : grpc-data-1.46.7-10.el9.noarch 80/111 2026-04-01T03:00:02.361 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-protobuf-3.14.0-17.el9_7.noarch 81/111 2026-04-01T03:00:02.365 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-zc-lockfile-2.0-10.el9.noarch 82/111 2026-04-01T03:00:02.368 
INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-natsort-7.1.1-5.el9.noarch 83/111 2026-04-01T03:00:02.370 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 84/111 2026-04-01T03:00:02.372 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 85/111 2026-04-01T03:00:02.391 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 86/111 2026-04-01T03:00:02.391 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph.target". 2026-04-01T03:00:02.391 INFO:teuthology.orchestra.run.vm08.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-crash.service". 2026-04-01T03:00:02.391 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:02.399 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 86/111 2026-04-01T03:00:02.399 INFO:teuthology.orchestra.run.vm08.stdout:warning: file /etc/logrotate.d/ceph: remove failed: No such file or directory 2026-04-01T03:00:02.399 INFO:teuthology.orchestra.run.vm08.stdout: 2026-04-01T03:00:02.411 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-8.g0597158282e.el9.c 40/111 2026-04-01T03:00:02.423 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-kubernetes-1:26.1.0-3.el9.noarch 41/111 2026-04-01T03:00:02.427 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-requests-oauthlib-1.3.0-12.el9.noarch 42/111 2026-04-01T03:00:02.428 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 86/111 2026-04-01T03:00:02.428 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 87/111 2026-04-01T03:00:02.437 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-requests-2.25.1-10.el9_6.noarch 43/111 2026-04-01T03:00:02.441 INFO:teuthology.orchestra.run.vm08.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 87/111 2026-04-01T03:00:02.441 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-google-auth-1:2.45.0-1.el9.noarch 44/111 2026-04-01T03:00:02.444 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:02.446 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : qatzip-libs-1.3.1-1.el9.x86_64 88/111 2026-04-01T03:00:02.450 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 
89/111 2026-04-01T03:00:02.452 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : python3-prettytable-0.7.2-27.el9.0.1.noarch 90/111 2026-04-01T03:00:02.452 INFO:teuthology.orchestra.run.vm08.stdout: Erasing : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 91/111 2026-04-01T03:00:02.452 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-rsa-4.9-2.el9.noarch 45/111 2026-04-01T03:00:02.459 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-pyasn1-modules-0.4.8-7.el9_7.noarch 46/111 2026-04-01T03:00:02.464 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-urllib3-1.26.5-6.el9_7.1.noarch 47/111 2026-04-01T03:00:02.469 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-cffi-1.14.5-5.el9.x86_64 48/111 2026-04-01T03:00:02.517 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-pycparser-2.20-6.el9.noarch 49/111 2026-04-01T03:00:02.528 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-numpy-1:1.23.5-2.el9_7.x86_64 50/111 2026-04-01T03:00:02.531 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : flexiblas-netlib-3.0.4-8.el9.0.1.x86_64 51/111 2026-04-01T03:00:02.534 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : flexiblas-openblas-openmp-3.0.4-8.el9.0.1.x86_64 52/111 2026-04-01T03:00:02.536 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : openblas-openmp-0.3.29-1.el9.x86_64 53/111 2026-04-01T03:00:02.540 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : libgfortran-11.5.0-11.el9.x86_64 54/111 2026-04-01T03:00:02.542 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 55/111 2026-04-01T03:00:02.560 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-8.g05971582 56/111 2026-04-01T03:00:02.561 INFO:teuthology.orchestra.run.vm03.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-04-01T03:00:02.561 INFO:teuthology.orchestra.run.vm03.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 
2026-04-01T03:00:02.561 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:02.561 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-immutable-object-cache-2:20.2.0-8.g05971582 56/111 2026-04-01T03:00:02.568 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:02.568 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-8.g05971582 56/111 2026-04-01T03:00:02.570 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : openblas-0.3.29-1.el9.x86_64 57/111 2026-04-01T03:00:02.573 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : flexiblas-3.0.4-8.el9.0.1.x86_64 58/111 2026-04-01T03:00:02.576 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-ply-3.11-14.el9.0.1.noarch 59/111 2026-04-01T03:00:02.579 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-idna-2.10-7.el9_4.1.noarch 60/111 2026-04-01T03:00:02.585 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-pysocks-1.7.1-12.el9.0.1.noarch 61/111 2026-04-01T03:00:02.589 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-pyasn1-0.4.8-7.el9_7.noarch 62/111 2026-04-01T03:00:02.594 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-cachetools-4.2.4-1.el9.noarch 63/111 2026-04-01T03:00:02.603 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-chardet-4.0.0-5.el9.noarch 64/111 2026-04-01T03:00:02.609 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-oauthlib-3.1.1-5.el9.noarch 65/111 2026-04-01T03:00:02.612 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-websocket-client-1.2.3-2.el9.noarch 66/111 2026-04-01T03:00:02.614 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-certifi-2023.05.07-4.el9.noarch 67/111 2026-04-01T03:00:02.617 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-repoze-lru-0.7-16.el9.noarch 68/111 2026-04-01T03:00:02.619 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jaraco-8.2.1-3.el9.noarch 69/111 2026-04-01T03:00:02.622 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-more-itertools-8.12.0-2.el9.noarch 70/111 2026-04-01T03:00:02.625 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-toml-0.10.2-6.el9.0.1.noarch 71/111 2026-04-01T03:00:02.628 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-pytz-2021.1-5.el9.noarch 72/111 2026-04-01T03:00:02.631 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-pyparsing-2.4.7-9.el9.0.1.noarch 73/111 2026-04-01T03:00:02.639 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-backports-tarfile-1.2.0-1.el9.noarch 74/111 2026-04-01T03:00:02.644 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-devel-3.9.23-2.el9.x86_64 75/111 2026-04-01T03:00:02.646 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-jsonpointer-2.0-4.el9.0.1.noarch 76/111 2026-04-01T03:00:02.650 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-typing-extensions-4.15.0-1.el9.noarch 77/111 2026-04-01T03:00:02.653 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-isodate-0.6.1-3.el9.noarch 78/111 2026-04-01T03:00:02.655 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-autocommand-2.2.2-8.el9.noarch 79/111 2026-04-01T03:00:02.661 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : grpc-data-1.46.7-10.el9.noarch 80/111 2026-04-01T03:00:02.665 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-protobuf-3.14.0-17.el9_7.noarch 81/111 2026-04-01T03:00:02.668 
INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-zc-lockfile-2.0-10.el9.noarch 82/111 2026-04-01T03:00:02.671 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-natsort-7.1.1-5.el9.noarch 83/111 2026-04-01T03:00:02.673 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-grafana-dashboards-2:20.2.0-8.g0597158282e. 84/111 2026-04-01T03:00:02.675 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-prometheus-alerts-2:20.2.0-8.g0597158282e.e 85/111 2026-04-01T03:00:02.694 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 86/111 2026-04-01T03:00:02.694 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph.target". 2026-04-01T03:00:02.694 INFO:teuthology.orchestra.run.vm03.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-crash.service". 2026-04-01T03:00:02.694 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:02.699 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:02.701 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 86/111 2026-04-01T03:00:02.701 INFO:teuthology.orchestra.run.vm03.stdout:warning: file /etc/logrotate.d/ceph: remove failed: No such file or directory 2026-04-01T03:00:02.701 INFO:teuthology.orchestra.run.vm03.stdout: 2026-04-01T03:00:02.726 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-base-2:20.2.0-8.g0597158282e.el9.clyso.x86_ 86/111 2026-04-01T03:00:02.726 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 87/111 2026-04-01T03:00:02.820 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:02.836 DEBUG:teuthology.orchestra.run.vm06:> sudo yum clean all 2026-04-01T03:00:02.943 INFO:teuthology.orchestra.run.vm06.stderr:[Errno 28] No space left on device: '/var/cache/dnf/metadata_lock.pid' 2026-04-01T03:00:02.960 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T03:00:02.961 ERROR:teuthology.run_tasks:Manager failed: install Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/task/install/__init__.py", line 220, in install yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 32, in nested yield vars File "/home/teuthos/kshtsk/teuthology/teuthology/task/install/__init__.py", line 644, in task yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File 
"/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/kshtsk/teuthology/teuthology/task/install/__init__.py", line 640, in task with contextutil.nested( File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/kshtsk/teuthology/teuthology/task/install/__init__.py", line 222, in install remove_packages(ctx, config, package_list) File "/home/teuthos/kshtsk/teuthology/teuthology/task/install/__init__.py", line 103, in remove_packages with parallel() as p: File "/home/teuthos/kshtsk/teuthology/teuthology/parallel.py", line 84, in __exit__ for result in self: File "/home/teuthos/kshtsk/teuthology/teuthology/parallel.py", line 98, in __next__ resurrect_traceback(result) File "/home/teuthos/kshtsk/teuthology/teuthology/parallel.py", line 30, in resurrect_traceback raise exc.exc_info[1] File "/home/teuthos/kshtsk/teuthology/teuthology/parallel.py", line 23, in capture_traceback return func(*args, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/task/install/rpm.py", line 43, in _remove remote.run(args='sudo yum clean all') File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait 
self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm06 with status 1: 'sudo yum clean all' 2026-04-01T03:00:02.961 DEBUG:teuthology.run_tasks:Unwinding manager clock 2026-04-01T03:00:02.963 INFO:teuthology.task.clock:Checking final clock skew... 2026-04-01T03:00:02.964 DEBUG:teuthology.orchestra.run.vm03:> PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true 2026-04-01T03:00:02.965 DEBUG:teuthology.orchestra.run.vm06:> PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true 2026-04-01T03:00:02.967 DEBUG:teuthology.orchestra.run.vm08:> PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true 2026-04-01T03:00:02.979 INFO:teuthology.orchestra.run.vm03.stderr:bash: line 1: ntpq: command not found 2026-04-01T03:00:02.981 INFO:teuthology.orchestra.run.vm06.stderr:bash: line 1: ntpq: command not found 2026-04-01T03:00:02.983 INFO:teuthology.orchestra.run.vm03.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample 2026-04-01T03:00:02.983 INFO:teuthology.orchestra.run.vm03.stdout:=============================================================================== 2026-04-01T03:00:02.983 INFO:teuthology.orchestra.run.vm03.stdout:^- cloudrouter.1in1.net 2 7 377 75 +831us[ +831us] +/- 38ms 2026-04-01T03:00:02.983 INFO:teuthology.orchestra.run.vm03.stdout:^* netcup01.theravenhub.com 2 8 377 211 +923us[ +934us] +/- 18ms 2026-04-01T03:00:02.983 INFO:teuthology.orchestra.run.vm03.stdout:^+ hfu.ovh 3 6 377 21 -1090us[-1090us] +/- 26ms 2026-04-01T03:00:02.983 INFO:teuthology.orchestra.run.vm03.stdout:^- static-217-115-11-162.in> 2 8 377 16 -2103us[-2103us] +/- 42ms 2026-04-01T03:00:02.984 INFO:teuthology.orchestra.run.vm08.stderr:bash: line 1: ntpq: command not found 2026-04-01T03:00:02.984 INFO:teuthology.orchestra.run.vm06.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample 2026-04-01T03:00:02.984 INFO:teuthology.orchestra.run.vm06.stdout:=============================================================================== 2026-04-01T03:00:02.984 INFO:teuthology.orchestra.run.vm06.stdout:^+ 139-162-156-95.ip.linode> 2 7 377 3 +1664us[+1664us] +/- 33ms 2026-04-01T03:00:02.984 INFO:teuthology.orchestra.run.vm06.stdout:^+ netcup01.theravenhub.com 2 8 377 22 +966us[ +966us] +/- 18ms 2026-04-01T03:00:02.984 INFO:teuthology.orchestra.run.vm06.stdout:^* node-4.infogral.is 2 7 377 68 -1634us[-1613us] +/- 14ms 2026-04-01T03:00:02.984 INFO:teuthology.orchestra.run.vm06.stdout:^+ hfu.ovh 3 7 377 88 -589us[ -568us] +/- 27ms 2026-04-01T03:00:03.072 INFO:teuthology.orchestra.run.vm08.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample 2026-04-01T03:00:03.072 INFO:teuthology.orchestra.run.vm08.stdout:=============================================================================== 2026-04-01T03:00:03.072 INFO:teuthology.orchestra.run.vm08.stdout:^* node-4.infogral.is 2 8 377 1 -1446us[-1533us] +/- 15ms 2026-04-01T03:00:03.072 INFO:teuthology.orchestra.run.vm08.stdout:^+ netcup01.theravenhub.com 2 8 377 24 +1052us[ +964us] +/- 18ms 2026-04-01T03:00:03.072 INFO:teuthology.orchestra.run.vm08.stdout:^+ hfu.ovh 3 6 377 18 -721us[ -809us] +/- 26ms 2026-04-01T03:00:03.072 INFO:teuthology.orchestra.run.vm08.stdout:^+ 139-162-156-95.ip.linode> 2 7 377 66 +2349us[+2261us] +/- 32ms 2026-04-01T03:00:03.072 DEBUG:teuthology.run_tasks:Unwinding manager ansible.cephlab 
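The final clock-skew check tries ntpq first and falls back to chronyc, which is why each of these Rocky nodes prints "ntpq: command not found" followed by a chrony sources table. The same check can be reproduced by hand as sketched below; 'chronyc tracking' is an extra command for a one-line offset summary and is not part of what teuthology ran here.

    # replicate the skew check shown above; ntpq is absent on chrony-only hosts
    PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true
    # optional: current offset and frequency error reported by chronyd
    chronyc tracking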
2026-04-01T03:00:03.075 INFO:teuthology.task.ansible:Skipping ansible cleanup... 2026-04-01T03:00:03.075 DEBUG:teuthology.run_tasks:Unwinding manager selinux 2026-04-01T03:00:03.077 DEBUG:teuthology.run_tasks:Unwinding manager pcp 2026-04-01T03:00:03.079 DEBUG:teuthology.run_tasks:Unwinding manager internal.timer 2026-04-01T03:00:03.081 INFO:teuthology.task.internal:Duration was 2349.881136 seconds 2026-04-01T03:00:03.081 DEBUG:teuthology.run_tasks:Unwinding manager internal.syslog 2026-04-01T03:00:03.084 INFO:teuthology.task.internal.syslog:Shutting down syslog monitoring... 2026-04-01T03:00:03.084 DEBUG:teuthology.orchestra.run.vm03:> sudo rm -f -- /etc/rsyslog.d/80-cephtest.conf && sudo service rsyslog restart 2026-04-01T03:00:03.085 DEBUG:teuthology.orchestra.run.vm06:> sudo rm -f -- /etc/rsyslog.d/80-cephtest.conf && sudo service rsyslog restart 2026-04-01T03:00:03.086 DEBUG:teuthology.orchestra.run.vm08:> sudo rm -f -- /etc/rsyslog.d/80-cephtest.conf && sudo service rsyslog restart 2026-04-01T03:00:03.120 INFO:teuthology.orchestra.run.vm03.stderr:Redirecting to /bin/systemctl restart rsyslog.service 2026-04-01T03:00:03.128 INFO:teuthology.orchestra.run.vm06.stderr:Redirecting to /bin/systemctl restart rsyslog.service 2026-04-01T03:00:03.152 INFO:teuthology.orchestra.run.vm08.stderr:Redirecting to /bin/systemctl restart rsyslog.service 2026-04-01T03:00:03.650 INFO:teuthology.task.internal.syslog:Checking logs for errors... 2026-04-01T03:00:03.650 DEBUG:teuthology.task.internal.syslog:Checking ubuntu@vm03.local 2026-04-01T03:00:03.650 DEBUG:teuthology.orchestra.run.vm03:> grep -E --binary-files=text '\bBUG\b|\bINFO\b|\bDEADLOCK\b' /home/ubuntu/cephtest/archive/syslog/kern.log | grep -v 'task .* blocked for more than .* seconds' | grep -v 'lockdep is turned off' | grep -v 'trying to register non-static key' | grep -v 'DEBUG: fsize' | grep -v CRON | grep -v 'BUG: bad unlock balance detected' | grep -v 'inconsistent lock state' | grep -v '*** DEADLOCK ***' | grep -v 'INFO: possible irq lock inversion dependency detected' | grep -v 'INFO: NMI handler (perf_event_nmi_handler) took too long to run' | grep -v 'INFO: recovery required on readonly' | grep -v 'ceph-create-keys: INFO' | grep -v INFO:ceph-create-keys | grep -v 'Loaded datasource DataSourceOpenStack' | grep -v 'container-storage-setup: INFO: Volume group backing root filesystem could not be determined' | grep -E -v '\bsalt-master\b|\bsalt-minion\b|\bsalt-api\b' | grep -v ceph-crash | grep -E -v '\btcmu-runner\b.*\bINFO\b' | head -n 1 2026-04-01T03:00:03.674 DEBUG:teuthology.task.internal.syslog:Checking ubuntu@vm06.local 2026-04-01T03:00:03.674 DEBUG:teuthology.orchestra.run.vm06:> grep -E --binary-files=text '\bBUG\b|\bINFO\b|\bDEADLOCK\b' /home/ubuntu/cephtest/archive/syslog/kern.log | grep -v 'task .* blocked for more than .* seconds' | grep -v 'lockdep is turned off' | grep -v 'trying to register non-static key' | grep -v 'DEBUG: fsize' | grep -v CRON | grep -v 'BUG: bad unlock balance detected' | grep -v 'inconsistent lock state' | grep -v '*** DEADLOCK ***' | grep -v 'INFO: possible irq lock inversion dependency detected' | grep -v 'INFO: NMI handler (perf_event_nmi_handler) took too long to run' | grep -v 'INFO: recovery required on readonly' | grep -v 'ceph-create-keys: INFO' | grep -v INFO:ceph-create-keys | grep -v 'Loaded datasource DataSourceOpenStack' | grep -v 'container-storage-setup: INFO: Volume group backing root filesystem could not be determined' | grep -E -v '\bsalt-master\b|\bsalt-minion\b|\bsalt-api\b' | grep 
-v ceph-crash | grep -E -v '\btcmu-runner\b.*\bINFO\b' | head -n 1 2026-04-01T03:00:03.696 DEBUG:teuthology.task.internal.syslog:Checking ubuntu@vm08.local 2026-04-01T03:00:03.697 DEBUG:teuthology.orchestra.run.vm08:> grep -E --binary-files=text '\bBUG\b|\bINFO\b|\bDEADLOCK\b' /home/ubuntu/cephtest/archive/syslog/kern.log | grep -v 'task .* blocked for more than .* seconds' | grep -v 'lockdep is turned off' | grep -v 'trying to register non-static key' | grep -v 'DEBUG: fsize' | grep -v CRON | grep -v 'BUG: bad unlock balance detected' | grep -v 'inconsistent lock state' | grep -v '*** DEADLOCK ***' | grep -v 'INFO: possible irq lock inversion dependency detected' | grep -v 'INFO: NMI handler (perf_event_nmi_handler) took too long to run' | grep -v 'INFO: recovery required on readonly' | grep -v 'ceph-create-keys: INFO' | grep -v INFO:ceph-create-keys | grep -v 'Loaded datasource DataSourceOpenStack' | grep -v 'container-storage-setup: INFO: Volume group backing root filesystem could not be determined' | grep -E -v '\bsalt-master\b|\bsalt-minion\b|\bsalt-api\b' | grep -v ceph-crash | grep -E -v '\btcmu-runner\b.*\bINFO\b' | head -n 1 2026-04-01T03:00:03.721 INFO:teuthology.task.internal.syslog:Gathering journactl... 2026-04-01T03:00:03.721 DEBUG:teuthology.orchestra.run.vm03:> sudo journalctl > /home/ubuntu/cephtest/archive/syslog/journalctl.log 2026-04-01T03:00:03.723 DEBUG:teuthology.orchestra.run.vm06:> sudo journalctl > /home/ubuntu/cephtest/archive/syslog/journalctl.log 2026-04-01T03:00:03.738 DEBUG:teuthology.orchestra.run.vm08:> sudo journalctl > /home/ubuntu/cephtest/archive/syslog/journalctl.log 2026-04-01T03:00:03.754 INFO:teuthology.orchestra.run.vm06.stderr:bash: line 1: /home/ubuntu/cephtest/archive/syslog/journalctl.log: No space left on device 2026-04-01T03:00:03.878 INFO:teuthology.orchestra.run.vm03.stdout: Running scriptlet: ceph-common-2:20.2.0-8.g0597158282e.el9.clyso.x8 87/111 2026-04-01T03:00:03.885 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : qatzip-libs-1.3.1-1.el9.x86_64 88/111 2026-04-01T03:00:03.888 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-ceph-common-2:20.2.0-8.g0597158282e.el9. 
89/111 2026-04-01T03:00:03.890 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : python3-prettytable-0.7.2-27.el9.0.1.noarch 90/111 2026-04-01T03:00:03.890 INFO:teuthology.orchestra.run.vm03.stdout: Erasing : ceph-selinux-2:20.2.0-8.g0597158282e.el9.clyso.x 91/111 2026-04-01T03:00:04.161 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T03:00:04.161 ERROR:teuthology.run_tasks:Manager failed: internal.syslog Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/task/internal/syslog.py", line 76, in syslog yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/kshtsk/teuthology/teuthology/task/internal/syslog.py", line 163, in syslog run.wait( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait proc.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait 
self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm06 with status 1: 'sudo journalctl > /home/ubuntu/cephtest/archive/syslog/journalctl.log' 2026-04-01T03:00:04.161 DEBUG:teuthology.run_tasks:Unwinding manager internal.sudo 2026-04-01T03:00:04.164 INFO:teuthology.task.internal:Restoring /etc/sudoers... 2026-04-01T03:00:04.164 DEBUG:teuthology.orchestra.run.vm03:> sudo mv -f /etc/sudoers.orig.teuthology /etc/sudoers 2026-04-01T03:00:04.190 DEBUG:teuthology.orchestra.run.vm06:> sudo mv -f /etc/sudoers.orig.teuthology /etc/sudoers 2026-04-01T03:00:04.218 DEBUG:teuthology.orchestra.run.vm08:> sudo mv -f /etc/sudoers.orig.teuthology /etc/sudoers 2026-04-01T03:00:04.240 DEBUG:teuthology.run_tasks:Unwinding manager internal.coredump 2026-04-01T03:00:04.243 DEBUG:teuthology.orchestra.run.vm03:> sudo sysctl -w kernel.core_pattern=core && sudo bash -c 'for f in `find /home/ubuntu/cephtest/archive/coredump -type f`; do file $f | grep -q systemd-sysusers && rm $f || true ; done' && rmdir --ignore-fail-on-non-empty -- /home/ubuntu/cephtest/archive/coredump 2026-04-01T03:00:04.244 DEBUG:teuthology.orchestra.run.vm06:> sudo sysctl -w kernel.core_pattern=core && sudo bash -c 'for f in `find /home/ubuntu/cephtest/archive/coredump -type f`; do file $f | grep -q systemd-sysusers && rm $f || true ; done' && rmdir --ignore-fail-on-non-empty -- /home/ubuntu/cephtest/archive/coredump 2026-04-01T03:00:04.259 DEBUG:teuthology.orchestra.run.vm08:> sudo sysctl -w kernel.core_pattern=core && sudo bash -c 'for f in `find /home/ubuntu/cephtest/archive/coredump -type f`; do file $f | grep -q systemd-sysusers && rm $f || true ; done' && rmdir --ignore-fail-on-non-empty -- /home/ubuntu/cephtest/archive/coredump 2026-04-01T03:00:04.269 INFO:teuthology.orchestra.run.vm03.stdout:kernel.core_pattern = core 2026-04-01T03:00:04.282 INFO:teuthology.orchestra.run.vm06.stdout:kernel.core_pattern = core 2026-04-01T03:00:04.305 INFO:teuthology.orchestra.run.vm08.stdout:kernel.core_pattern = core 2026-04-01T03:00:04.319 DEBUG:teuthology.orchestra.run.vm03:> test -e /home/ubuntu/cephtest/archive/coredump 2026-04-01T03:00:04.342 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T03:00:04.342 DEBUG:teuthology.orchestra.run.vm06:> test -e /home/ubuntu/cephtest/archive/coredump 2026-04-01T03:00:04.357 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T03:00:04.357 DEBUG:teuthology.orchestra.run.vm08:> test -e /home/ubuntu/cephtest/archive/coredump 2026-04-01T03:00:04.375 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T03:00:04.375 DEBUG:teuthology.run_tasks:Unwinding manager internal.archive 2026-04-01T03:00:04.378 INFO:teuthology.task.internal:Transferring archived files... 2026-04-01T03:00:04.378 DEBUG:teuthology.misc:Transferring archived files from vm03:/home/ubuntu/cephtest/archive to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/remote/vm03 2026-04-01T03:00:04.378 DEBUG:teuthology.orchestra.run.vm03:> sudo tar c -f - -C /home/ubuntu/cephtest/archive -- . 
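Teuthology streams each node's archive directory back with the 'sudo tar c -f - -C /home/ubuntu/cephtest/archive -- .' command shown above. If the same data ever has to be pulled by hand while the directory still exists on the node, an equivalent but purely illustrative invocation over plain ssh would be the following; the host and user come from this log, and the ssh pipe is an assumption, since teuthology uses its own connection handling.

    # copy a node's archive directory to the local machine (illustrative only)
    mkdir -p vm03-archive
    ssh ubuntu@vm03.local "sudo tar c -f - -C /home/ubuntu/cephtest/archive -- ." | tar x -C vm03-archive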
2026-04-01T03:00:04.600 DEBUG:teuthology.misc:Transferring archived files from vm06:/home/ubuntu/cephtest/archive to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/remote/vm06 2026-04-01T03:00:04.600 DEBUG:teuthology.orchestra.run.vm06:> sudo tar c -f - -C /home/ubuntu/cephtest/archive -- . 2026-04-01T03:00:04.624 DEBUG:teuthology.misc:Transferring archived files from vm08:/home/ubuntu/cephtest/archive to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/remote/vm08 2026-04-01T03:00:04.624 DEBUG:teuthology.orchestra.run.vm08:> sudo tar c -f - -C /home/ubuntu/cephtest/archive -- . 2026-04-01T03:00:04.787 INFO:teuthology.task.internal:Removing archive directory... 2026-04-01T03:00:04.787 DEBUG:teuthology.orchestra.run.vm03:> rm -rf -- /home/ubuntu/cephtest/archive 2026-04-01T03:00:04.789 DEBUG:teuthology.orchestra.run.vm06:> rm -rf -- /home/ubuntu/cephtest/archive 2026-04-01T03:00:04.790 DEBUG:teuthology.orchestra.run.vm08:> rm -rf -- /home/ubuntu/cephtest/archive 2026-04-01T03:00:04.844 DEBUG:teuthology.run_tasks:Unwinding manager internal.archive_upload 2026-04-01T03:00:04.847 INFO:teuthology.task.internal:Not uploading archives. 2026-04-01T03:00:04.847 DEBUG:teuthology.run_tasks:Unwinding manager internal.base 2026-04-01T03:00:04.849 INFO:teuthology.task.internal:Tidying up after the test... 2026-04-01T03:00:04.850 DEBUG:teuthology.orchestra.run.vm03:> find /home/ubuntu/cephtest -ls ; rmdir -- /home/ubuntu/cephtest 2026-04-01T03:00:04.851 DEBUG:teuthology.orchestra.run.vm06:> find /home/ubuntu/cephtest -ls ; rmdir -- /home/ubuntu/cephtest 2026-04-01T03:00:04.852 DEBUG:teuthology.orchestra.run.vm08:> find /home/ubuntu/cephtest -ls ; rmdir -- /home/ubuntu/cephtest 2026-04-01T03:00:04.866 INFO:teuthology.orchestra.run.vm06.stdout: 83886251 0 drwxr-xr-x 3 ubuntu ubuntu 76 Apr 1 03:00 /home/ubuntu/cephtest 2026-04-01T03:00:04.866 INFO:teuthology.orchestra.run.vm06.stdout: 58721128 0 drwxr-xr-x 2 ubuntu ubuntu 6 Apr 1 02:24 /home/ubuntu/cephtest/ceph.data 2026-04-01T03:00:04.866 INFO:teuthology.orchestra.run.vm06.stdout: 83890472 4 -rw-r--r-- 1 ceph root 20 Apr 1 02:24 /home/ubuntu/cephtest/url_file 2026-04-01T03:00:04.866 INFO:teuthology.orchestra.run.vm06.stdout: 83890473 0 srwxr-xr-x 1 root root 0 Apr 1 02:24 /home/ubuntu/cephtest/rgw.opslog.ceph.client.1.sock 2026-04-01T03:00:04.867 INFO:teuthology.orchestra.run.vm06.stderr:rmdir: failed to remove '/home/ubuntu/cephtest': Directory not empty 2026-04-01T03:00:04.867 INFO:teuthology.orchestra.run.vm03.stdout: 83886506 0 drwxr-xr-x 3 ubuntu ubuntu 23 Apr 1 03:00 /home/ubuntu/cephtest 2026-04-01T03:00:04.867 INFO:teuthology.orchestra.run.vm03.stdout: 58721128 0 drwxr-xr-x 2 ubuntu ubuntu 6 Apr 1 02:24 /home/ubuntu/cephtest/ceph.data 2026-04-01T03:00:04.868 INFO:teuthology.orchestra.run.vm03.stderr:rmdir: failed to remove '/home/ubuntu/cephtest': Directory not empty 2026-04-01T03:00:04.886 DEBUG:teuthology.orchestra.run:got remote process result: 1 2026-04-01T03:00:04.886 ERROR:teuthology.run_tasks:Manager failed: internal.base Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/task/internal/__init__.py", line 48, in base yield File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File 
"/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 552, in task with contextutil.nested(*subtasks): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 54, in nested raise exc[1] File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 364, in create_pools yield File "/home/teuthos/kshtsk/teuthology/teuthology/contextutil.py", line 46, in nested if exit(*exc): File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 142, in __exit__ next(self.gen) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/rgw.py", line 269, in start_rgw rgwadmin(ctx, client, cmd=['gc', 'process', '--include-all'], check_status=True) File "/home/teuthos/src/git.local_ceph_99e8bef8f767b591604d6078b7861a00c2936d53/qa/tasks/util/rgw.py", line 34, in rgwadmin proc = remote.run( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/remote.py", line 596, in run r = self._runner(client=self.ssh, name=self.shortname, **kwargs) File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 461, in run r.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' During handling of the above exception, another exception occurred: Traceback (most recent call last): File "/home/teuthos/kshtsk/teuthology/teuthology/run_tasks.py", line 160, in run_tasks suppress = manager.__exit__(*exc_info) File "/home/teuthos/.local/share/uv/python/cpython-3.10.19-linux-x86_64-gnu/lib/python3.10/contextlib.py", line 153, in __exit__ self.gen.throw(typ, value, traceback) File "/home/teuthos/kshtsk/teuthology/teuthology/task/internal/__init__.py", line 53, in base run.wait( File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 485, in wait proc.wait() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 161, in wait self._raise_for_status() File "/home/teuthos/kshtsk/teuthology/teuthology/orchestra/run.py", line 181, in _raise_for_status raise CommandFailedError( teuthology.exceptions.CommandFailedError: Command failed on vm03 with status 1: 'find /home/ubuntu/cephtest -ls ; rmdir -- /home/ubuntu/cephtest' 2026-04-01T03:00:04.886 DEBUG:teuthology.run_tasks:Unwinding manager console_log 2026-04-01T03:00:04.889 DEBUG:teuthology.run_tasks:Exception was not quenched, exiting: CommandFailedError: Command failed on vm03 with status 1: 'adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin --log-to-stderr --format json -n client.0 --cluster ceph gc process --include-all' 2026-04-01T03:00:04.890 INFO:teuthology.run:Summary data: description: rgw/dedup/{beast bluestore-bitmap fixed-3-rgw ignore-pg-availability overrides 
supported-distros/{rocky_latest} tasks/{0-install test_dedup}}
duration: 2349.881136417389
failure_reason: 'Command failed on vm03 with status 1: ''adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage radosgw-admin -n client.0 user rm --uid foo.client.0 --purge-data --cluster ceph'''
flavor: default
owner: supriti
sentry_event: null
status: fail
success: false
2026-04-01T03:00:04.890 DEBUG:teuthology.report:Pushing job info to http://localhost:8080
2026-04-01T03:00:04.901 INFO:teuthology.orchestra.run.vm08.stdout: 83886961 0 drwxr-xr-x 3 ubuntu ubuntu 95 Apr 1 03:00 /home/ubuntu/cephtest
2026-04-01T03:00:04.902 INFO:teuthology.orchestra.run.vm08.stdout: 58721128 0 drwxr-xr-x 2 ubuntu ubuntu 6 Apr 1 02:24 /home/ubuntu/cephtest/ceph.data
2026-04-01T03:00:04.902 INFO:teuthology.orchestra.run.vm08.stdout: 83887390 4 -rw-r--r-- 1 ubuntu ubuntu 409 Apr 1 02:24 /home/ubuntu/cephtest/ceph.monmap
2026-04-01T03:00:04.902 INFO:teuthology.orchestra.run.vm08.stdout: 83887391 4 -rw-r--r-- 1 ceph root 20 Apr 1 02:24 /home/ubuntu/cephtest/url_file
2026-04-01T03:00:04.902 INFO:teuthology.orchestra.run.vm08.stdout: 83887392 0 srwxr-xr-x 1 root root 0 Apr 1 02:24 /home/ubuntu/cephtest/rgw.opslog.ceph.client.2.sock
2026-04-01T03:00:04.902 INFO:teuthology.orchestra.run.vm08.stderr:rmdir: failed to remove '/home/ubuntu/cephtest': Directory not empty
2026-04-01T03:00:04.910 INFO:teuthology.run:FAIL
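With the job marked FAIL, the per-node archives transferred above to /archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640/remote/ are the place to look for the failing radosgw-admin command and the vm06 disk-full errors. A starting point is sketched below; the directory layout beneath remote/<node> is an assumption.

    # search the transferred node archives for the failing admin command and the ENOSPC errors
    ARCHIVE=/archive/supriti-2026-03-31_23:51:22-rgw-wip-sse-s3-on-v20.2.0-none-default-vps/4640
    grep -rn --binary-files=text -e 'radosgw-admin' -e 'No space left on device' "$ARCHIVE/remote" | head -n 40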